Compare commits

...

2 Commits

Author SHA1 Message Date
77147dc95b refactor 2026-02-18 17:29:46 +03:00
026239e3bf fix 2026-02-15 11:11:30 +03:00
37 changed files with 66185 additions and 60433 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -12,13 +12,13 @@
# [SECTION: IMPORTS] # [SECTION: IMPORTS]
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from typing import List, Optional from typing import List, Optional, Dict
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from ...dependencies import get_config_manager, get_task_manager, get_resource_service, has_permission from ...dependencies import get_config_manager, get_task_manager, get_resource_service, get_mapping_service, has_permission
from ...core.logger import logger, belief_scope from ...core.logger import logger, belief_scope
# [/SECTION] # [/SECTION]
router = APIRouter() router = APIRouter(prefix="/api/dashboards", tags=["Dashboards"])
# [DEF:GitStatus:DataClass] # [DEF:GitStatus:DataClass]
class GitStatus(BaseModel): class GitStatus(BaseModel):
@@ -47,26 +47,44 @@ class DashboardItem(BaseModel):
class DashboardsResponse(BaseModel): class DashboardsResponse(BaseModel):
dashboards: List[DashboardItem] dashboards: List[DashboardItem]
total: int total: int
page: int
page_size: int
total_pages: int
# [/DEF:DashboardsResponse:DataClass] # [/DEF:DashboardsResponse:DataClass]
# [DEF:get_dashboards:Function] # [DEF:get_dashboards:Function]
# @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status # @PURPOSE: Fetch list of dashboards from a specific environment with Git status and last task status
# @PRE: env_id must be a valid environment ID # @PRE: env_id must be a valid environment ID
# @POST: Returns a list of dashboards with enhanced metadata # @PRE: page must be >= 1 if provided
# @PRE: page_size must be between 1 and 100 if provided
# @POST: Returns a list of dashboards with enhanced metadata and pagination info
# @POST: Response includes pagination metadata (page, page_size, total, total_pages)
# @PARAM: env_id (str) - The environment ID to fetch dashboards from # @PARAM: env_id (str) - The environment ID to fetch dashboards from
# @PARAM: search (Optional[str]) - Filter by title/slug # @PARAM: search (Optional[str]) - Filter by title/slug
# @PARAM: page (Optional[int]) - Page number (default: 1)
# @PARAM: page_size (Optional[int]) - Items per page (default: 10, max: 100)
# @RETURN: DashboardsResponse - List of dashboards with status metadata # @RETURN: DashboardsResponse - List of dashboards with status metadata
# @RELATION: CALLS -> ResourceService.get_dashboards_with_status # @RELATION: CALLS -> ResourceService.get_dashboards_with_status
@router.get("/api/dashboards", response_model=DashboardsResponse) @router.get("", response_model=DashboardsResponse)
async def get_dashboards( async def get_dashboards(
env_id: str, env_id: str,
search: Optional[str] = None, search: Optional[str] = None,
page: int = 1,
page_size: int = 10,
config_manager=Depends(get_config_manager), config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager), task_manager=Depends(get_task_manager),
resource_service=Depends(get_resource_service), resource_service=Depends(get_resource_service),
_ = Depends(has_permission("plugin:migration", "READ")) _ = Depends(has_permission("plugin:migration", "READ"))
): ):
with belief_scope("get_dashboards", f"env_id={env_id}, search={search}"): with belief_scope("get_dashboards", f"env_id={env_id}, search={search}, page={page}, page_size={page_size}"):
# Validate pagination parameters
if page < 1:
logger.error(f"[get_dashboards][Coherence:Failed] Invalid page: {page}")
raise HTTPException(status_code=400, detail="Page must be >= 1")
if page_size < 1 or page_size > 100:
logger.error(f"[get_dashboards][Coherence:Failed] Invalid page_size: {page_size}")
raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")
# Validate environment exists # Validate environment exists
environments = config_manager.get_environments() environments = config_manager.get_environments()
env = next((e for e in environments if e.id == env_id), None) env = next((e for e in environments if e.id == env_id), None)
@@ -90,11 +108,23 @@ async def get_dashboards(
or search_lower in d.get('slug', '').lower() or search_lower in d.get('slug', '').lower()
] ]
logger.info(f"[get_dashboards][Coherence:OK] Returning {len(dashboards)} dashboards") # Calculate pagination
total = len(dashboards)
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
# Slice dashboards for current page
paginated_dashboards = dashboards[start_idx:end_idx]
logger.info(f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards (page {page}/{total_pages}, total: {total})")
return DashboardsResponse( return DashboardsResponse(
dashboards=dashboards, dashboards=paginated_dashboards,
total=len(dashboards) total=total,
page=page,
page_size=page_size,
total_pages=total_pages
) )
except Exception as e: except Exception as e:
@@ -102,4 +132,192 @@ async def get_dashboards(
raise HTTPException(status_code=503, detail=f"Failed to fetch dashboards: {str(e)}") raise HTTPException(status_code=503, detail=f"Failed to fetch dashboards: {str(e)}")
# [/DEF:get_dashboards:Function] # [/DEF:get_dashboards:Function]
# [DEF:MigrateRequest:DataClass]
class MigrateRequest(BaseModel):
source_env_id: str = Field(..., description="Source environment ID")
target_env_id: str = Field(..., description="Target environment ID")
dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to migrate")
db_mappings: Optional[Dict[str, str]] = Field(None, description="Database mappings for migration")
replace_db_config: bool = Field(False, description="Replace database configuration")
# [/DEF:MigrateRequest:DataClass]
# [DEF:TaskResponse:DataClass]
class TaskResponse(BaseModel):
task_id: str
# [/DEF:TaskResponse:DataClass]
# [DEF:migrate_dashboards:Function]
# @PURPOSE: Trigger bulk migration of dashboards from source to target environment
# @PRE: User has permission plugin:migration:execute
# @PRE: source_env_id and target_env_id are valid environment IDs
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking migration progress
# @POST: Task is created and queued for execution
# @PARAM: request (MigrateRequest) - Migration request with source, target, and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> MigrationPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/migrate", response_model=TaskResponse)
async def migrate_dashboards(
request: MigrateRequest,
config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager),
_ = Depends(has_permission("plugin:migration", "EXECUTE"))
):
with belief_scope("migrate_dashboards", f"source={request.source_env_id}, target={request.target_env_id}, count={len(request.dashboard_ids)}"):
# Validate request
if not request.dashboard_ids:
logger.error("[migrate_dashboards][Coherence:Failed] No dashboard IDs provided")
raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")
# Validate environments exist
environments = config_manager.get_environments()
source_env = next((e for e in environments if e.id == request.source_env_id), None)
target_env = next((e for e in environments if e.id == request.target_env_id), None)
if not source_env:
logger.error(f"[migrate_dashboards][Coherence:Failed] Source environment not found: {request.source_env_id}")
raise HTTPException(status_code=404, detail="Source environment not found")
if not target_env:
logger.error(f"[migrate_dashboards][Coherence:Failed] Target environment not found: {request.target_env_id}")
raise HTTPException(status_code=404, detail="Target environment not found")
try:
# Create migration task
task_params = {
'source_env_id': request.source_env_id,
'target_env_id': request.target_env_id,
'dashboards': request.dashboard_ids,
'replace_db_config': request.replace_db_config,
'db_mappings': request.db_mappings or {}
}
task_id = await task_manager.create_task(
plugin_id='superset-migration',
params=task_params
)
logger.info(f"[migrate_dashboards][Coherence:OK] Migration task created: {task_id} for {len(request.dashboard_ids)} dashboards")
return TaskResponse(task_id=str(task_id))
except Exception as e:
logger.error(f"[migrate_dashboards][Coherence:Failed] Failed to create migration task: {e}")
raise HTTPException(status_code=503, detail=f"Failed to create migration task: {str(e)}")
# [/DEF:migrate_dashboards:Function]
# [DEF:BackupRequest:DataClass]
class BackupRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
dashboard_ids: List[int] = Field(..., description="List of dashboard IDs to backup")
schedule: Optional[str] = Field(None, description="Cron schedule for recurring backups (e.g., '0 0 * * *')")
# [/DEF:BackupRequest:DataClass]
# [DEF:backup_dashboards:Function]
# @PURPOSE: Trigger bulk backup of dashboards with optional cron schedule
# @PRE: User has permission plugin:backup:execute
# @PRE: env_id is a valid environment ID
# @PRE: dashboard_ids is a non-empty list
# @POST: Returns task_id for tracking backup progress
# @POST: Task is created and queued for execution
# @POST: If schedule is provided, a scheduled task is created
# @PARAM: request (BackupRequest) - Backup request with environment and dashboard IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> BackupPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/backup", response_model=TaskResponse)
async def backup_dashboards(
request: BackupRequest,
config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager),
_ = Depends(has_permission("plugin:backup", "EXECUTE"))
):
with belief_scope("backup_dashboards", f"env={request.env_id}, count={len(request.dashboard_ids)}, schedule={request.schedule}"):
# Validate request
if not request.dashboard_ids:
logger.error("[backup_dashboards][Coherence:Failed] No dashboard IDs provided")
raise HTTPException(status_code=400, detail="At least one dashboard ID must be provided")
# Validate environment exists
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == request.env_id), None)
if not env:
logger.error(f"[backup_dashboards][Coherence:Failed] Environment not found: {request.env_id}")
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Create backup task
task_params = {
'env': request.env_id,
'dashboards': request.dashboard_ids,
'schedule': request.schedule
}
task_id = await task_manager.create_task(
plugin_id='superset-backup',
params=task_params
)
logger.info(f"[backup_dashboards][Coherence:OK] Backup task created: {task_id} for {len(request.dashboard_ids)} dashboards")
return TaskResponse(task_id=str(task_id))
except Exception as e:
logger.error(f"[backup_dashboards][Coherence:Failed] Failed to create backup task: {e}")
raise HTTPException(status_code=503, detail=f"Failed to create backup task: {str(e)}")
# [/DEF:backup_dashboards:Function]
# [DEF:DatabaseMapping:DataClass]
class DatabaseMapping(BaseModel):
source_db: str
target_db: str
confidence: float
# [/DEF:DatabaseMapping:DataClass]
# [DEF:DatabaseMappingsResponse:DataClass]
class DatabaseMappingsResponse(BaseModel):
mappings: List[DatabaseMapping]
# [/DEF:DatabaseMappingsResponse:DataClass]
# [DEF:get_database_mappings:Function]
# @PURPOSE: Get database mapping suggestions between source and target environments
# @PRE: User has permission plugin:migration:read
# @PRE: source_env_id and target_env_id are valid environment IDs
# @POST: Returns list of suggested database mappings with confidence scores
# @PARAM: source_env_id (str) - Source environment ID
# @PARAM: target_env_id (str) - Target environment ID
# @RETURN: DatabaseMappingsResponse - List of suggested mappings
# @RELATION: CALLS -> MappingService.get_suggestions
@router.get("/db-mappings", response_model=DatabaseMappingsResponse)
async def get_database_mappings(
source_env_id: str,
target_env_id: str,
mapping_service=Depends(get_mapping_service),
_ = Depends(has_permission("plugin:migration", "READ"))
):
with belief_scope("get_database_mappings", f"source={source_env_id}, target={target_env_id}"):
try:
# Get mapping suggestions using MappingService
suggestions = await mapping_service.get_suggestions(source_env_id, target_env_id)
# Format suggestions as DatabaseMapping objects
mappings = [
DatabaseMapping(
source_db=s.get('source_db', ''),
target_db=s.get('target_db', ''),
confidence=s.get('confidence', 0.0)
)
for s in suggestions
]
logger.info(f"[get_database_mappings][Coherence:OK] Returning {len(mappings)} database mapping suggestions")
return DatabaseMappingsResponse(mappings=mappings)
except Exception as e:
logger.error(f"[get_database_mappings][Coherence:Failed] Failed to get database mappings: {e}")
raise HTTPException(status_code=503, detail=f"Failed to get database mappings: {str(e)}")
# [/DEF:get_database_mappings:Function]
# [/DEF:backend.src.api.routes.dashboards:Module] # [/DEF:backend.src.api.routes.dashboards:Module]

View File

@@ -16,9 +16,10 @@ from typing import List, Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from ...dependencies import get_config_manager, get_task_manager, get_resource_service, has_permission from ...dependencies import get_config_manager, get_task_manager, get_resource_service, has_permission
from ...core.logger import logger, belief_scope from ...core.logger import logger, belief_scope
from ...core.superset_client import SupersetClient
# [/SECTION] # [/SECTION]
router = APIRouter() router = APIRouter(prefix="/api/datasets", tags=["Datasets"])
# [DEF:MappedFields:DataClass] # [DEF:MappedFields:DataClass]
class MappedFields(BaseModel): class MappedFields(BaseModel):
@@ -42,22 +43,64 @@ class DatasetItem(BaseModel):
last_task: Optional[LastTask] = None last_task: Optional[LastTask] = None
# [/DEF:DatasetItem:DataClass] # [/DEF:DatasetItem:DataClass]
# [DEF:LinkedDashboard:DataClass]
class LinkedDashboard(BaseModel):
id: int
title: str
slug: Optional[str] = None
# [/DEF:LinkedDashboard:DataClass]
# [DEF:DatasetColumn:DataClass]
class DatasetColumn(BaseModel):
id: int
name: str
type: Optional[str] = None
is_dttm: bool = False
is_active: bool = True
description: Optional[str] = None
# [/DEF:DatasetColumn:DataClass]
# [DEF:DatasetDetailResponse:DataClass]
class DatasetDetailResponse(BaseModel):
id: int
table_name: Optional[str] = None
schema: Optional[str] = None
database: str
description: Optional[str] = None
columns: List[DatasetColumn]
column_count: int
sql: Optional[str] = None
linked_dashboards: List[LinkedDashboard]
linked_dashboard_count: int
is_sqllab_view: bool = False
created_on: Optional[str] = None
changed_on: Optional[str] = None
# [/DEF:DatasetDetailResponse:DataClass]
# [DEF:DatasetsResponse:DataClass] # [DEF:DatasetsResponse:DataClass]
class DatasetsResponse(BaseModel): class DatasetsResponse(BaseModel):
datasets: List[DatasetItem] datasets: List[DatasetItem]
total: int total: int
page: int
page_size: int
total_pages: int
# [/DEF:DatasetsResponse:DataClass] # [/DEF:DatasetsResponse:DataClass]
# [DEF:get_datasets:Function] # [DEF:TaskResponse:DataClass]
# @PURPOSE: Fetch list of datasets from a specific environment with mapping progress class TaskResponse(BaseModel):
# @PRE: env_id must be a valid environment ID task_id: str
# @POST: Returns a list of datasets with enhanced metadata # [/DEF:TaskResponse:DataClass]
# @PARAM: env_id (str) - The environment ID to fetch datasets from
# @PARAM: search (Optional[str]) - Filter by table name # [DEF:get_dataset_ids:Function]
# @RETURN: DatasetsResponse - List of datasets with status metadata # @PURPOSE: Fetch list of all dataset IDs from a specific environment (without pagination)
# @PRE: env_id must be a valid environment ID
# @POST: Returns a list of all dataset IDs
# @PARAM: env_id (str) - The environment ID to fetch datasets from
# @PARAM: search (Optional[str]) - Filter by table name
# @RETURN: List[int] - List of dataset IDs
# @RELATION: CALLS -> ResourceService.get_datasets_with_status # @RELATION: CALLS -> ResourceService.get_datasets_with_status
@router.get("/api/datasets", response_model=DatasetsResponse) @router.get("/ids")
async def get_datasets( async def get_dataset_ids(
env_id: str, env_id: str,
search: Optional[str] = None, search: Optional[str] = None,
config_manager=Depends(get_config_manager), config_manager=Depends(get_config_manager),
@@ -65,7 +108,73 @@ async def get_datasets(
resource_service=Depends(get_resource_service), resource_service=Depends(get_resource_service),
_ = Depends(has_permission("plugin:migration", "READ")) _ = Depends(has_permission("plugin:migration", "READ"))
): ):
with belief_scope("get_datasets", f"env_id={env_id}, search={search}"): with belief_scope("get_dataset_ids", f"env_id={env_id}, search={search}"):
# Validate environment exists
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == env_id), None)
if not env:
logger.error(f"[get_dataset_ids][Coherence:Failed] Environment not found: {env_id}")
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Get all tasks for status lookup
all_tasks = task_manager.get_all_tasks()
# Fetch datasets with status using ResourceService
datasets = await resource_service.get_datasets_with_status(env, all_tasks)
# Apply search filter if provided
if search:
search_lower = search.lower()
datasets = [
d for d in datasets
if search_lower in d.get('table_name', '').lower()
]
# Extract and return just the IDs
dataset_ids = [d['id'] for d in datasets]
logger.info(f"[get_dataset_ids][Coherence:OK] Returning {len(dataset_ids)} dataset IDs")
return {"dataset_ids": dataset_ids}
except Exception as e:
logger.error(f"[get_dataset_ids][Coherence:Failed] Failed to fetch dataset IDs: {e}")
raise HTTPException(status_code=503, detail=f"Failed to fetch dataset IDs: {str(e)}")
# [/DEF:get_dataset_ids:Function]
# [DEF:get_datasets:Function]
# @PURPOSE: Fetch list of datasets from a specific environment with mapping progress
# @PRE: env_id must be a valid environment ID
# @PRE: page must be >= 1 if provided
# @PRE: page_size must be between 1 and 100 if provided
# @POST: Returns a list of datasets with enhanced metadata and pagination info
# @POST: Response includes pagination metadata (page, page_size, total, total_pages)
# @PARAM: env_id (str) - The environment ID to fetch datasets from
# @PARAM: search (Optional[str]) - Filter by table name
# @PARAM: page (Optional[int]) - Page number (default: 1)
# @PARAM: page_size (Optional[int]) - Items per page (default: 10, max: 100)
# @RETURN: DatasetsResponse - List of datasets with status metadata
# @RELATION: CALLS -> ResourceService.get_datasets_with_status
@router.get("", response_model=DatasetsResponse)
async def get_datasets(
env_id: str,
search: Optional[str] = None,
page: int = 1,
page_size: int = 10,
config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager),
resource_service=Depends(get_resource_service),
_ = Depends(has_permission("plugin:migration", "READ"))
):
with belief_scope("get_datasets", f"env_id={env_id}, search={search}, page={page}, page_size={page_size}"):
# Validate pagination parameters
if page < 1:
logger.error(f"[get_datasets][Coherence:Failed] Invalid page: {page}")
raise HTTPException(status_code=400, detail="Page must be >= 1")
if page_size < 1 or page_size > 100:
logger.error(f"[get_datasets][Coherence:Failed] Invalid page_size: {page_size}")
raise HTTPException(status_code=400, detail="Page size must be between 1 and 100")
# Validate environment exists # Validate environment exists
environments = config_manager.get_environments() environments = config_manager.get_environments()
env = next((e for e in environments if e.id == env_id), None) env = next((e for e in environments if e.id == env_id), None)
@@ -88,11 +197,23 @@ async def get_datasets(
if search_lower in d.get('table_name', '').lower() if search_lower in d.get('table_name', '').lower()
] ]
logger.info(f"[get_datasets][Coherence:OK] Returning {len(datasets)} datasets") # Calculate pagination
total = len(datasets)
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
# Slice datasets for current page
paginated_datasets = datasets[start_idx:end_idx]
logger.info(f"[get_datasets][Coherence:OK] Returning {len(paginated_datasets)} datasets (page {page}/{total_pages}, total: {total})")
return DatasetsResponse( return DatasetsResponse(
datasets=datasets, datasets=paginated_datasets,
total=len(datasets) total=total,
page=page,
page_size=page_size,
total_pages=total_pages
) )
except Exception as e: except Exception as e:
@@ -100,4 +221,175 @@ async def get_datasets(
raise HTTPException(status_code=503, detail=f"Failed to fetch datasets: {str(e)}") raise HTTPException(status_code=503, detail=f"Failed to fetch datasets: {str(e)}")
# [/DEF:get_datasets:Function] # [/DEF:get_datasets:Function]
# [DEF:MapColumnsRequest:DataClass]
class MapColumnsRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
dataset_ids: List[int] = Field(..., description="List of dataset IDs to map")
source_type: str = Field(..., description="Source type: 'postgresql' or 'xlsx'")
connection_id: Optional[str] = Field(None, description="Connection ID for PostgreSQL source")
file_data: Optional[str] = Field(None, description="File path or data for XLSX source")
# [/DEF:MapColumnsRequest:DataClass]
# [DEF:map_columns:Function]
# @PURPOSE: Trigger bulk column mapping for datasets
# @PRE: User has permission plugin:mapper:execute
# @PRE: env_id is a valid environment ID
# @PRE: dataset_ids is a non-empty list
# @POST: Returns task_id for tracking mapping progress
# @POST: Task is created and queued for execution
# @PARAM: request (MapColumnsRequest) - Mapping request with environment and dataset IDs
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> MapperPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/map-columns", response_model=TaskResponse)
async def map_columns(
request: MapColumnsRequest,
config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager),
_ = Depends(has_permission("plugin:mapper", "EXECUTE"))
):
with belief_scope("map_columns", f"env={request.env_id}, count={len(request.dataset_ids)}, source={request.source_type}"):
# Validate request
if not request.dataset_ids:
logger.error("[map_columns][Coherence:Failed] No dataset IDs provided")
raise HTTPException(status_code=400, detail="At least one dataset ID must be provided")
# Validate source type
if request.source_type not in ['postgresql', 'xlsx']:
logger.error(f"[map_columns][Coherence:Failed] Invalid source type: {request.source_type}")
raise HTTPException(status_code=400, detail="Source type must be 'postgresql' or 'xlsx'")
# Validate environment exists
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == request.env_id), None)
if not env:
logger.error(f"[map_columns][Coherence:Failed] Environment not found: {request.env_id}")
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Create mapping task
task_params = {
'env_id': request.env_id,
'datasets': request.dataset_ids,
'source_type': request.source_type,
'connection_id': request.connection_id,
'file_data': request.file_data
}
task_id = await task_manager.create_task(
plugin_id='dataset-mapper',
params=task_params
)
logger.info(f"[map_columns][Coherence:OK] Mapping task created: {task_id} for {len(request.dataset_ids)} datasets")
return TaskResponse(task_id=str(task_id))
except Exception as e:
logger.error(f"[map_columns][Coherence:Failed] Failed to create mapping task: {e}")
raise HTTPException(status_code=503, detail=f"Failed to create mapping task: {str(e)}")
# [/DEF:map_columns:Function]
# [DEF:GenerateDocsRequest:DataClass]
class GenerateDocsRequest(BaseModel):
env_id: str = Field(..., description="Environment ID")
dataset_ids: List[int] = Field(..., description="List of dataset IDs to generate docs for")
llm_provider: str = Field(..., description="LLM provider to use")
options: Optional[dict] = Field(None, description="Additional options for documentation generation")
# [/DEF:GenerateDocsRequest:DataClass]
# [DEF:generate_docs:Function]
# @PURPOSE: Trigger bulk documentation generation for datasets
# @PRE: User has permission plugin:llm_analysis:execute
# @PRE: env_id is a valid environment ID
# @PRE: dataset_ids is a non-empty list
# @POST: Returns task_id for tracking documentation generation progress
# @POST: Task is created and queued for execution
# @PARAM: request (GenerateDocsRequest) - Documentation generation request
# @RETURN: TaskResponse - Task ID for tracking
# @RELATION: DISPATCHES -> LLMAnalysisPlugin
# @RELATION: CALLS -> task_manager.create_task
@router.post("/generate-docs", response_model=TaskResponse)
async def generate_docs(
request: GenerateDocsRequest,
config_manager=Depends(get_config_manager),
task_manager=Depends(get_task_manager),
_ = Depends(has_permission("plugin:llm_analysis", "EXECUTE"))
):
with belief_scope("generate_docs", f"env={request.env_id}, count={len(request.dataset_ids)}, provider={request.llm_provider}"):
# Validate request
if not request.dataset_ids:
logger.error("[generate_docs][Coherence:Failed] No dataset IDs provided")
raise HTTPException(status_code=400, detail="At least one dataset ID must be provided")
# Validate environment exists
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == request.env_id), None)
if not env:
logger.error(f"[generate_docs][Coherence:Failed] Environment not found: {request.env_id}")
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Create documentation generation task
task_params = {
'env_id': request.env_id,
'datasets': request.dataset_ids,
'llm_provider': request.llm_provider,
'options': request.options or {}
}
task_id = await task_manager.create_task(
plugin_id='llm_documentation',
params=task_params
)
logger.info(f"[generate_docs][Coherence:OK] Documentation generation task created: {task_id} for {len(request.dataset_ids)} datasets")
return TaskResponse(task_id=str(task_id))
except Exception as e:
logger.error(f"[generate_docs][Coherence:Failed] Failed to create documentation generation task: {e}")
raise HTTPException(status_code=503, detail=f"Failed to create documentation generation task: {str(e)}")
# [/DEF:generate_docs:Function]
# [DEF:get_dataset_detail:Function]
# @PURPOSE: Get detailed dataset information including columns and linked dashboards
# @PRE: env_id is a valid environment ID
# @PRE: dataset_id is a valid dataset ID
# @POST: Returns detailed dataset info with columns and linked dashboards
# @PARAM: env_id (str) - The environment ID
# @PARAM: dataset_id (int) - The dataset ID
# @RETURN: DatasetDetailResponse - Detailed dataset information
# @RELATION: CALLS -> SupersetClient.get_dataset_detail
@router.get("/{dataset_id}", response_model=DatasetDetailResponse)
async def get_dataset_detail(
env_id: str,
dataset_id: int,
config_manager=Depends(get_config_manager),
_ = Depends(has_permission("plugin:migration", "READ"))
):
with belief_scope("get_dataset_detail", f"env_id={env_id}, dataset_id={dataset_id}"):
# Validate environment exists
environments = config_manager.get_environments()
env = next((e for e in environments if e.id == env_id), None)
if not env:
logger.error(f"[get_dataset_detail][Coherence:Failed] Environment not found: {env_id}")
raise HTTPException(status_code=404, detail="Environment not found")
try:
# Fetch detailed dataset info using SupersetClient
client = SupersetClient(env)
dataset_detail = client.get_dataset_detail(dataset_id)
logger.info(f"[get_dataset_detail][Coherence:OK] Retrieved dataset {dataset_id} with {dataset_detail['column_count']} columns and {dataset_detail['linked_dashboard_count']} linked dashboards")
return DatasetDetailResponse(**dataset_detail)
except Exception as e:
logger.error(f"[get_dataset_detail][Coherence:Failed] Failed to fetch dataset detail: {e}")
raise HTTPException(status_code=503, detail=f"Failed to fetch dataset detail: {str(e)}")
# [/DEF:get_dataset_detail:Function]
# [/DEF:backend.src.api.routes.datasets:Module] # [/DEF:backend.src.api.routes.datasets:Module]

View File

@@ -18,7 +18,7 @@ from pydantic import BaseModel, Field
from ...core.logger import belief_scope from ...core.logger import belief_scope
# [/SECTION] # [/SECTION]
router = APIRouter() router = APIRouter(prefix="/api/environments", tags=["Environments"])
# [DEF:ScheduleSchema:DataClass] # [DEF:ScheduleSchema:DataClass]
class ScheduleSchema(BaseModel): class ScheduleSchema(BaseModel):
@@ -43,6 +43,8 @@ class DatabaseResponse(BaseModel):
# [DEF:get_environments:Function] # [DEF:get_environments:Function]
# @PURPOSE: List all configured environments. # @PURPOSE: List all configured environments.
# @LAYER: API
# @SEMANTICS: list, environments, config
# @PRE: config_manager is injected via Depends. # @PRE: config_manager is injected via Depends.
# @POST: Returns a list of EnvironmentResponse objects. # @POST: Returns a list of EnvironmentResponse objects.
# @RETURN: List[EnvironmentResponse] # @RETURN: List[EnvironmentResponse]
@@ -71,6 +73,8 @@ async def get_environments(
# [DEF:update_environment_schedule:Function] # [DEF:update_environment_schedule:Function]
# @PURPOSE: Update backup schedule for an environment. # @PURPOSE: Update backup schedule for an environment.
# @LAYER: API
# @SEMANTICS: update, schedule, backup, environment
# @PRE: Environment id exists, schedule is valid ScheduleSchema. # @PRE: Environment id exists, schedule is valid ScheduleSchema.
# @POST: Backup schedule updated and scheduler reloaded. # @POST: Backup schedule updated and scheduler reloaded.
# @PARAM: id (str) - The environment ID. # @PARAM: id (str) - The environment ID.
@@ -103,6 +107,8 @@ async def update_environment_schedule(
# [DEF:get_environment_databases:Function] # [DEF:get_environment_databases:Function]
# @PURPOSE: Fetch the list of databases from a specific environment. # @PURPOSE: Fetch the list of databases from a specific environment.
# @LAYER: API
# @SEMANTICS: fetch, databases, superset, environment
# @PRE: Environment id exists. # @PRE: Environment id exists.
# @POST: Returns a list of database summaries from the environment. # @POST: Returns a list of database summaries from the environment.
# @PARAM: id (str) - The environment ID. # @PARAM: id (str) - The environment ID.

View File

@@ -26,7 +26,7 @@ from src.api.routes.git_schemas import (
from src.services.git_service import GitService from src.services.git_service import GitService
from src.core.logger import logger, belief_scope from src.core.logger import logger, belief_scope
router = APIRouter(prefix="/api/git", tags=["git"]) router = APIRouter(tags=["git"])
git_service = GitService() git_service = GitService()
# [DEF:get_git_configs:Function] # [DEF:get_git_configs:Function]

View File

@@ -16,7 +16,7 @@ from sqlalchemy.orm import Session
# [DEF:router:Global] # [DEF:router:Global]
# @PURPOSE: APIRouter instance for LLM routes. # @PURPOSE: APIRouter instance for LLM routes.
router = APIRouter(prefix="/api/llm", tags=["LLM"]) router = APIRouter(tags=["LLM"])
# [/DEF:router:Global] # [/DEF:router:Global]
# [DEF:get_providers:Function] # [DEF:get_providers:Function]

View File

@@ -21,7 +21,7 @@ from ...models.mapping import DatabaseMapping
from pydantic import BaseModel from pydantic import BaseModel
# [/SECTION] # [/SECTION]
router = APIRouter(prefix="/api/mappings", tags=["mappings"]) router = APIRouter(tags=["mappings"])
# [DEF:MappingCreate:DataClass] # [DEF:MappingCreate:DataClass]
class MappingCreate(BaseModel): class MappingCreate(BaseModel):

View File

@@ -44,7 +44,7 @@ async def get_dashboards(
# @POST: Starts the migration task and returns the task ID. # @POST: Starts the migration task and returns the task ID.
# @PARAM: selection (DashboardSelection) - The dashboards to migrate. # @PARAM: selection (DashboardSelection) - The dashboards to migrate.
# @RETURN: Dict - {"task_id": str, "message": str} # @RETURN: Dict - {"task_id": str, "message": str}
@router.post("/migration/execute") @router.post("/execute")
async def execute_migration( async def execute_migration(
selection: DashboardSelection, selection: DashboardSelection,
config_manager=Depends(get_config_manager), config_manager=Depends(get_config_manager),

View File

@@ -283,6 +283,7 @@ class ConsolidatedSettingsResponse(BaseModel):
environments: List[dict] environments: List[dict]
connections: List[dict] connections: List[dict]
llm: dict llm: dict
llm_providers: List[dict]
logging: dict logging: dict
storage: dict storage: dict
# [/DEF:ConsolidatedSettingsResponse:Class] # [/DEF:ConsolidatedSettingsResponse:Class]
@@ -302,13 +303,74 @@ async def get_consolidated_settings(
config = config_manager.get_config() config = config_manager.get_config()
from ...services.llm_provider import LLMProviderService
from ...core.database import SessionLocal
db = SessionLocal()
try:
llm_service = LLMProviderService(db)
providers = llm_service.get_all_providers()
llm_providers_list = [
{
"id": p.id,
"provider_type": p.provider_type,
"name": p.name,
"base_url": p.base_url,
"api_key": "********",
"default_model": p.default_model,
"is_active": p.is_active
} for p in providers
]
finally:
db.close()
return ConsolidatedSettingsResponse( return ConsolidatedSettingsResponse(
environments=config.environments, environments=[env.dict() for env in config.environments],
connections=config.settings.connections, connections=config.settings.connections,
llm=config.settings.llm, llm=config.settings.llm,
logging=config.settings.logging, llm_providers=llm_providers_list,
storage=config.settings.storage logging=config.settings.logging.dict(),
storage=config.settings.storage.dict()
) )
# [/DEF:get_consolidated_settings:Function] # [/DEF:get_consolidated_settings:Function]
# [DEF:update_consolidated_settings:Function]
# @PURPOSE: Bulk update application settings from the consolidated view.
# @PRE: User has admin permissions, config is valid.
# @POST: Settings are updated and saved via ConfigManager.
@router.patch("/consolidated")
async def update_consolidated_settings(
settings_patch: dict,
config_manager: ConfigManager = Depends(get_config_manager),
_ = Depends(has_permission("admin:settings", "WRITE"))
):
with belief_scope("update_consolidated_settings"):
logger.info("[update_consolidated_settings][Entry] Applying consolidated settings patch")
current_config = config_manager.get_config()
current_settings = current_config.settings
# Update connections if provided
if "connections" in settings_patch:
current_settings.connections = settings_patch["connections"]
# Update LLM if provided
if "llm" in settings_patch:
current_settings.llm = settings_patch["llm"]
# Update Logging if provided
if "logging" in settings_patch:
current_settings.logging = LoggingConfig(**settings_patch["logging"])
# Update Storage if provided
if "storage" in settings_patch:
new_storage = StorageConfig(**settings_patch["storage"])
is_valid, message = config_manager.validate_path(new_storage.root_path)
if not is_valid:
raise HTTPException(status_code=400, detail=message)
current_settings.storage = new_storage
config_manager.update_global_settings(current_settings)
return {"status": "success", "message": "Settings updated"}
# [/DEF:update_consolidated_settings:Function]
# [/DEF:SettingsRouter:Module] # [/DEF:SettingsRouter:Module]

View File

@@ -115,14 +115,21 @@ app.include_router(plugins.router, prefix="/api/plugins", tags=["Plugins"])
app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"]) app.include_router(tasks.router, prefix="/api/tasks", tags=["Tasks"])
app.include_router(settings.router, prefix="/api/settings", tags=["Settings"]) app.include_router(settings.router, prefix="/api/settings", tags=["Settings"])
app.include_router(connections.router, prefix="/api/settings/connections", tags=["Connections"]) app.include_router(connections.router, prefix="/api/settings/connections", tags=["Connections"])
app.include_router(environments.router, prefix="/api/environments", tags=["Environments"]) app.include_router(environments.router, tags=["Environments"])
app.include_router(mappings.router) app.include_router(mappings.router, prefix="/api/mappings", tags=["Mappings"])
app.include_router(migration.router) app.include_router(migration.router)
app.include_router(git.router) app.include_router(git.router, prefix="/api/git", tags=["Git"])
app.include_router(llm.router) app.include_router(llm.router, prefix="/api/llm", tags=["LLM"])
app.include_router(storage.router, prefix="/api/storage", tags=["Storage"]) app.include_router(storage.router, prefix="/api/storage", tags=["Storage"])
app.include_router(dashboards.router, tags=["Dashboards"]) app.include_router(dashboards.router)
app.include_router(datasets.router, tags=["Datasets"]) app.include_router(datasets.router)
# [DEF:api.include_routers:Action]
# @PURPOSE: Registers all API routers with the FastAPI application.
# @LAYER: API
# @SEMANTICS: routes, registration, api
# [/DEF:api.include_routers:Action]
# [DEF:websocket_endpoint:Function] # [DEF:websocket_endpoint:Function]
# @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering. # @PURPOSE: Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering.
@@ -234,24 +241,19 @@ frontend_path = project_root / "frontend" / "build"
if frontend_path.exists(): if frontend_path.exists():
app.mount("/_app", StaticFiles(directory=str(frontend_path / "_app")), name="static") app.mount("/_app", StaticFiles(directory=str(frontend_path / "_app")), name="static")
# Serve other static files from the root of build directory @app.get("/{file_path:path}", include_in_schema=False)
# [DEF:serve_spa:Function]
# @PURPOSE: Serves frontend static files or index.html for SPA routing.
# @PRE: file_path is requested by the client.
# @POST: Returns the requested file or index.html as a fallback.
@app.get("/{file_path:path}")
async def serve_spa(file_path: str): async def serve_spa(file_path: str):
with belief_scope("serve_spa", f"path={file_path}"): # Only serve SPA for non-API paths
# Don't serve SPA for API routes that fell through # API routes are registered separately and should be matched by FastAPI first
if file_path.startswith("api/"): if file_path and (file_path.startswith("api/") or file_path.startswith("/api/") or file_path == "api"):
logger.info(f"[DEBUG] API route fell through to serve_spa: {file_path}") # This should not happen if API routers are properly registered
raise HTTPException(status_code=404, detail=f"API endpoint not found: {file_path}") # Return 404 instead of serving HTML
raise HTTPException(status_code=404, detail=f"API endpoint not found: {file_path}")
full_path = frontend_path / file_path full_path = frontend_path / file_path
if full_path.is_file(): if file_path and full_path.is_file():
return FileResponse(str(full_path)) return FileResponse(str(full_path))
# Fallback to index.html for SPA routing return FileResponse(str(frontend_path / "index.html"))
return FileResponse(str(frontend_path / "index.html"))
# [/DEF:serve_spa:Function] # [/DEF:serve_spa:Function]
else: else:
# [DEF:read_root:Function] # [DEF:read_root:Function]

View File

@@ -48,6 +48,8 @@ class GlobalSettings(BaseModel):
storage: StorageConfig = Field(default_factory=StorageConfig) storage: StorageConfig = Field(default_factory=StorageConfig)
default_environment_id: Optional[str] = None default_environment_id: Optional[str] = None
logging: LoggingConfig = Field(default_factory=LoggingConfig) logging: LoggingConfig = Field(default_factory=LoggingConfig)
connections: List[dict] = []
llm: dict = Field(default_factory=lambda: {"providers": [], "default_provider": ""})
# Task retention settings # Task retention settings
task_retention_days: int = 30 task_retention_days: int = 30

View File

@@ -236,6 +236,88 @@ class SupersetClient:
return result return result
# [/DEF:get_datasets_summary:Function] # [/DEF:get_datasets_summary:Function]
# [DEF:get_dataset_detail:Function]
# @PURPOSE: Fetches detailed dataset information including columns and linked dashboards
# @PRE: Client is authenticated and dataset_id exists.
# @POST: Returns detailed dataset info with columns and linked dashboards.
# @PARAM: dataset_id (int) - The dataset ID to fetch details for.
# @RETURN: Dict - Dataset details with columns and linked_dashboards.
# @RELATION: CALLS -> self.get_dataset
# @RELATION: CALLS -> self.network.request (for related_objects)
def get_dataset_detail(self, dataset_id: int) -> Dict:
with belief_scope("SupersetClient.get_dataset_detail", f"id={dataset_id}"):
# Get base dataset info
response = self.get_dataset(dataset_id)
# If the response is a dict and has a 'result' key, use that (standard Superset API)
if isinstance(response, dict) and 'result' in response:
dataset = response['result']
else:
dataset = response
# Extract columns information
columns = dataset.get("columns", [])
column_info = []
for col in columns:
column_info.append({
"id": col.get("id"),
"name": col.get("column_name"),
"type": col.get("type"),
"is_dttm": col.get("is_dttm", False),
"is_active": col.get("is_active", True),
"description": col.get("description", "")
})
# Get linked dashboards using related_objects endpoint
linked_dashboards = []
try:
related_objects = self.network.request(
method="GET",
endpoint=f"/dataset/{dataset_id}/related_objects"
)
# Handle different response formats
if isinstance(related_objects, dict):
if "dashboards" in related_objects:
dashboards_data = related_objects["dashboards"]
elif "result" in related_objects and isinstance(related_objects["result"], dict):
dashboards_data = related_objects["result"].get("dashboards", [])
else:
dashboards_data = []
for dash in dashboards_data:
linked_dashboards.append({
"id": dash.get("id"),
"title": dash.get("dashboard_title") or dash.get("title", "Unknown"),
"slug": dash.get("slug")
})
except Exception as e:
app_logger.warning(f"[get_dataset_detail][Warning] Failed to fetch related dashboards: {e}")
linked_dashboards = []
# Extract SQL table information
sql = dataset.get("sql", "")
result = {
"id": dataset.get("id"),
"table_name": dataset.get("table_name"),
"schema": dataset.get("schema"),
"database": dataset.get("database", {}).get("database_name", "Unknown"),
"description": dataset.get("description", ""),
"columns": column_info,
"column_count": len(column_info),
"sql": sql,
"linked_dashboards": linked_dashboards,
"linked_dashboard_count": len(linked_dashboards),
"is_sqllab_view": dataset.get("is_sqllab_view", False),
"created_on": dataset.get("created_on"),
"changed_on": dataset.get("changed_on")
}
app_logger.info(f"[get_dataset_detail][Exit] Got dataset {dataset_id} with {len(column_info)} columns and {len(linked_dashboards)} linked dashboards")
return result
# [/DEF:get_dataset_detail:Function]
# [DEF:get_dataset:Function] # [DEF:get_dataset:Function]
# @PURPOSE: Получает информацию о конкретном датасете по его ID. # @PURPOSE: Получает информацию о конкретном датасете по его ID.
# @PARAM: dataset_id (int) - ID датасета. # @PARAM: dataset_id (int) - ID датасета.

View File

@@ -42,6 +42,8 @@ def suggest_mappings(source_databases: List[Dict], target_databases: List[Dict],
name, score, index = match name, score, index = match
if score >= threshold: if score >= threshold:
suggestions.append({ suggestions.append({
"source_db": s_db['database_name'],
"target_db": target_databases[index]['database_name'],
"source_db_uuid": s_db['uuid'], "source_db_uuid": s_db['uuid'],
"target_db_uuid": target_databases[index]['uuid'], "target_db_uuid": target_databases[index]['uuid'],
"confidence": score / 100.0 "confidence": score / 100.0

View File

@@ -118,14 +118,41 @@ class APIClient:
def _init_session(self) -> requests.Session: def _init_session(self) -> requests.Session:
with belief_scope("_init_session"): with belief_scope("_init_session"):
session = requests.Session() session = requests.Session()
# Create a custom adapter that handles TLS issues
class TLSAdapter(HTTPAdapter):
def init_poolmanager(self, connections, maxsize, block=False):
from urllib3.poolmanager import PoolManager
import ssl
# Create an SSL context that ignores TLSv1 unrecognized name errors
ctx = ssl.create_default_context()
ctx.set_ciphers('HIGH:!aNULL:!eNULL:!EXPORT:!DES:!RC4:!MD5:!PSK:!SRP:!CAMELLIA')
# Ignore TLSV1_UNRECOGNIZED_NAME errors by disabling hostname verification
# This is safe when verify_ssl is false (we're already not verifying the certificate)
ctx.check_hostname = False
self.poolmanager = PoolManager(
num_pools=connections,
maxsize=maxsize,
block=block,
ssl_context=ctx
)
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504]) retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 502, 503, 504])
adapter = HTTPAdapter(max_retries=retries) adapter = TLSAdapter(max_retries=retries)
session.mount('http://', adapter) session.mount('http://', adapter)
session.mount('https://', adapter) session.mount('https://', adapter)
if not self.request_settings["verify_ssl"]: if not self.request_settings["verify_ssl"]:
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
app_logger.warning("[_init_session][State] SSL verification disabled.") app_logger.warning("[_init_session][State] SSL verification disabled.")
session.verify = self.request_settings["verify_ssl"] # When verify_ssl is false, we should also disable hostname verification
session.verify = False
else:
session.verify = True
return session return session
# [/DEF:_init_session:Function] # [/DEF:_init_session:Function]

View File

@@ -1,8 +1,8 @@
# [DEF:Dependencies:Module] # [DEF:Dependencies:Module]
# @SEMANTICS: dependency, injection, singleton, factory, auth, jwt # @SEMANTICS: dependency, injection, singleton, factory, auth, jwt
# @PURPOSE: Manages the creation and provision of shared application dependencies, such as the PluginLoader and TaskManager, to avoid circular imports. # @PURPOSE: Manages creation and provision of shared application dependencies, such as PluginLoader and TaskManager, to avoid circular imports.
# @LAYER: Core # @LAYER: Core
# @RELATION: Used by the main app and API routers to get access to shared instances. # @RELATION: Used by main app and API routers to get access to shared instances.
from pathlib import Path from pathlib import Path
from fastapi import Depends, HTTPException, status from fastapi import Depends, HTTPException, status
@@ -13,6 +13,7 @@ from .core.task_manager import TaskManager
from .core.config_manager import ConfigManager from .core.config_manager import ConfigManager
from .core.scheduler import SchedulerService from .core.scheduler import SchedulerService
from .services.resource_service import ResourceService from .services.resource_service import ResourceService
from .services.mapping_service import MappingService
from .core.database import init_db, get_auth_db from .core.database import init_db, get_auth_db
from .core.logger import logger from .core.logger import logger
from .core.auth.jwt import decode_token from .core.auth.jwt import decode_token
@@ -29,12 +30,12 @@ config_manager = ConfigManager(config_path=str(config_path))
init_db() init_db()
# [DEF:get_config_manager:Function] # [DEF:get_config_manager:Function]
# @PURPOSE: Dependency injector for the ConfigManager. # @PURPOSE: Dependency injector for ConfigManager.
# @PRE: Global config_manager must be initialized. # @PRE: Global config_manager must be initialized.
# @POST: Returns shared ConfigManager instance. # @POST: Returns shared ConfigManager instance.
# @RETURN: ConfigManager - The shared config manager instance. # @RETURN: ConfigManager - The shared config manager instance.
def get_config_manager() -> ConfigManager: def get_config_manager() -> ConfigManager:
"""Dependency injector for the ConfigManager.""" """Dependency injector for ConfigManager."""
return config_manager return config_manager
# [/DEF:get_config_manager:Function] # [/DEF:get_config_manager:Function]
@@ -54,54 +55,64 @@ resource_service = ResourceService()
logger.info("ResourceService initialized") logger.info("ResourceService initialized")
# [DEF:get_plugin_loader:Function] # [DEF:get_plugin_loader:Function]
# @PURPOSE: Dependency injector for the PluginLoader. # @PURPOSE: Dependency injector for PluginLoader.
# @PRE: Global plugin_loader must be initialized. # @PRE: Global plugin_loader must be initialized.
# @POST: Returns shared PluginLoader instance. # @POST: Returns shared PluginLoader instance.
# @RETURN: PluginLoader - The shared plugin loader instance. # @RETURN: PluginLoader - The shared plugin loader instance.
def get_plugin_loader() -> PluginLoader: def get_plugin_loader() -> PluginLoader:
"""Dependency injector for the PluginLoader.""" """Dependency injector for PluginLoader."""
return plugin_loader return plugin_loader
# [/DEF:get_plugin_loader:Function] # [/DEF:get_plugin_loader:Function]
# [DEF:get_task_manager:Function] # [DEF:get_task_manager:Function]
# @PURPOSE: Dependency injector for the TaskManager. # @PURPOSE: Dependency injector for TaskManager.
# @PRE: Global task_manager must be initialized. # @PRE: Global task_manager must be initialized.
# @POST: Returns shared TaskManager instance. # @POST: Returns shared TaskManager instance.
# @RETURN: TaskManager - The shared task manager instance. # @RETURN: TaskManager - The shared task manager instance.
def get_task_manager() -> TaskManager: def get_task_manager() -> TaskManager:
"""Dependency injector for the TaskManager.""" """Dependency injector for TaskManager."""
return task_manager return task_manager
# [/DEF:get_task_manager:Function] # [/DEF:get_task_manager:Function]
# [DEF:get_scheduler_service:Function] # [DEF:get_scheduler_service:Function]
# @PURPOSE: Dependency injector for the SchedulerService. # @PURPOSE: Dependency injector for SchedulerService.
# @PRE: Global scheduler_service must be initialized. # @PRE: Global scheduler_service must be initialized.
# @POST: Returns shared SchedulerService instance. # @POST: Returns shared SchedulerService instance.
# @RETURN: SchedulerService - The shared scheduler service instance. # @RETURN: SchedulerService - The shared scheduler service instance.
def get_scheduler_service() -> SchedulerService: def get_scheduler_service() -> SchedulerService:
"""Dependency injector for the SchedulerService.""" """Dependency injector for SchedulerService."""
return scheduler_service return scheduler_service
# [/DEF:get_scheduler_service:Function] # [/DEF:get_scheduler_service:Function]
# [DEF:get_resource_service:Function] # [DEF:get_resource_service:Function]
# @PURPOSE: Dependency injector for the ResourceService. # @PURPOSE: Dependency injector for ResourceService.
# @PRE: Global resource_service must be initialized. # @PRE: Global resource_service must be initialized.
# @POST: Returns shared ResourceService instance. # @POST: Returns shared ResourceService instance.
# @RETURN: ResourceService - The shared resource service instance. # @RETURN: ResourceService - The shared resource service instance.
def get_resource_service() -> ResourceService: def get_resource_service() -> ResourceService:
"""Dependency injector for the ResourceService.""" """Dependency injector for ResourceService."""
return resource_service return resource_service
# [/DEF:get_resource_service:Function] # [/DEF:get_resource_service:Function]
# [DEF:get_mapping_service:Function]
# @PURPOSE: Dependency injector for MappingService.
# @PRE: Global config_manager must be initialized.
# @POST: Returns new MappingService instance.
# @RETURN: MappingService - A new mapping service instance.
def get_mapping_service() -> MappingService:
"""Dependency injector for MappingService."""
return MappingService(config_manager)
# [/DEF:get_mapping_service:Function]
# [DEF:oauth2_scheme:Variable] # [DEF:oauth2_scheme:Variable]
# @PURPOSE: OAuth2 password bearer scheme for token extraction. # @PURPOSE: OAuth2 password bearer scheme for token extraction.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login") oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/auth/login")
# [/DEF:oauth2_scheme:Variable] # [/DEF:oauth2_scheme:Variable]
# [DEF:get_current_user:Function] # [DEF:get_current_user:Function]
# @PURPOSE: Dependency for retrieving the currently authenticated user from a JWT. # @PURPOSE: Dependency for retrieving currently authenticated user from a JWT.
# @PRE: JWT token provided in Authorization header. # @PRE: JWT token provided in Authorization header.
# @POST: Returns the User object if token is valid. # @POST: Returns User object if token is valid.
# @THROW: HTTPException 401 if token is invalid or user not found. # @THROW: HTTPException 401 if token is invalid or user not found.
# @PARAM: token (str) - Extracted JWT token. # @PARAM: token (str) - Extracted JWT token.
# @PARAM: db (Session) - Auth database session. # @PARAM: db (Session) - Auth database session.

View File

@@ -0,0 +1,163 @@
#!/usr/bin/env python3
"""
Script to test dataset-to-dashboard relationships from Superset API.
Usage:
cd backend && .venv/bin/python3 src/scripts/test_dataset_dashboard_relations.py
"""
import json
import sys
from pathlib import Path
# Add src to path (parent of scripts directory)
sys.path.append(str(Path(__file__).parent.parent.parent))
from src.core.superset_client import SupersetClient
from src.core.config_manager import ConfigManager
from src.core.logger import logger
def test_dashboard_dataset_relations():
"""Test fetching dataset-to-dashboard relationships."""
# Load environment from existing config
config_manager = ConfigManager()
environments = config_manager.get_environments()
if not environments:
logger.error("No environments configured!")
return
# Use first available environment
env = environments[0]
logger.info(f"Using environment: {env.name} ({env.url})")
client = SupersetClient(env)
try:
# Authenticate
logger.info("Authenticating to Superset...")
client.authenticate()
logger.info("Authentication successful!")
# Test dashboard ID 13
dashboard_id = 13
logger.info(f"\n=== Fetching Dashboard {dashboard_id} ===")
dashboard = client.network.request(method="GET", endpoint=f"/dashboard/{dashboard_id}")
print("\nDashboard structure:")
print(f" ID: {dashboard.get('id')}")
print(f" Title: {dashboard.get('dashboard_title')}")
print(f" Published: {dashboard.get('published')}")
# Check for slices/charts
if 'slices' in dashboard:
logger.info(f"\n Found {len(dashboard['slices'])} slices/charts in dashboard")
for i, slice_data in enumerate(dashboard['slices'][:5]): # Show first 5
print(f" Slice {i+1}:")
print(f" ID: {slice_data.get('slice_id')}")
print(f" Name: {slice_data.get('slice_name')}")
# Check for datasource_id
if 'datasource_id' in slice_data:
print(f" Datasource ID: {slice_data['datasource_id']}")
if 'datasource_name' in slice_data:
print(f" Datasource Name: {slice_data['datasource_name']}")
if 'datasource_type' in slice_data:
print(f" Datasource Type: {slice_data['datasource_type']}")
else:
logger.warning(" No 'slices' field found in dashboard response")
logger.info(f" Available fields: {list(dashboard.keys())}")
# Test dataset ID 26
dataset_id = 26
logger.info(f"\n=== Fetching Dataset {dataset_id} ===")
dataset = client.get_dataset(dataset_id)
print("\nDataset structure:")
print(f" ID: {dataset.get('id')}")
print(f" Table Name: {dataset.get('table_name')}")
print(f" Schema: {dataset.get('schema')}")
print(f" Database: {dataset.get('database', {}).get('database_name', 'Unknown')}")
# Check for dashboards that use this dataset
logger.info(f"\n=== Finding Dashboards using Dataset {dataset_id} ===")
# Method: Use Superset's related_objects API
try:
logger.info(f" Using /api/v1/dataset/{dataset_id}/related_objects endpoint...")
related_objects = client.network.request(
method="GET",
endpoint=f"/dataset/{dataset_id}/related_objects"
)
logger.info(f" Related objects response type: {type(related_objects)}")
logger.info(f" Related objects keys: {list(related_objects.keys()) if isinstance(related_objects, dict) else 'N/A'}")
# Check for dashboards in related objects
if 'dashboards' in related_objects:
dashboards = related_objects['dashboards']
logger.info(f" Found {len(dashboards)} dashboards using this dataset:")
for dash in dashboards:
logger.info(f" - Dashboard ID {dash.get('id')}: {dash.get('dashboard_title', dash.get('title', 'Unknown'))}")
elif 'result' in related_objects:
# Some Superset versions use 'result' wrapper
result = related_objects['result']
if 'dashboards' in result:
dashboards = result['dashboards']
logger.info(f" Found {len(dashboards)} dashboards using this dataset:")
for dash in dashboards:
logger.info(f" - Dashboard ID {dash.get('id')}: {dash.get('dashboard_title', dash.get('title', 'Unknown'))}")
else:
logger.warning(f" No 'dashboards' key in result. Keys: {list(result.keys())}")
else:
logger.warning(f" No 'dashboards' key in response. Available keys: {list(related_objects.keys())}")
logger.info(f" Full related_objects response:")
print(json.dumps(related_objects, indent=2, default=str)[:1000])
except Exception as e:
logger.error(f" Error fetching related objects: {e}")
import traceback
traceback.print_exc()
# Method 2: Try to use the position_json from dashboard
logger.info(f"\n=== Analyzing Dashboard Position JSON ===")
if 'position_json' in dashboard:
position_data = json.loads(dashboard['position_json'])
logger.info(f" Position data type: {type(position_data)}")
# Look for datasource references
datasource_ids = set()
if isinstance(position_data, dict):
for key, value in position_data.items():
if 'datasource' in key.lower() or key == 'DASHBOARD_VERSION_KEY':
logger.debug(f" Key: {key}, Value type: {type(value)}")
elif isinstance(position_data, list):
logger.info(f" Position data has {len(position_data)} items")
for item in position_data[:3]: # Show first 3
logger.debug(f" Item: {type(item)}, keys: {list(item.keys()) if isinstance(item, dict) else 'N/A'}")
if isinstance(item, dict):
if 'datasource_id' in item:
datasource_ids.add(item['datasource_id'])
if datasource_ids:
logger.info(f" Found datasource IDs: {datasource_ids}")
# Save full response for analysis
output_file = Path(__file__).parent / "dataset_dashboard_analysis.json"
with open(output_file, 'w') as f:
json.dump({
'dashboard': dashboard,
'dataset': dataset
}, f, indent=2, default=str)
logger.info(f"\nFull response saved to: {output_file}")
except Exception as e:
logger.error(f"Error: {e}", exc_info=True)
raise
if __name__ == "__main__":
test_dashboard_dataset_relations()

Binary file not shown.

View File

@@ -1,6 +1,8 @@
# [DEF:backend.tests.test_dashboards_api:Module] # [DEF:backend.tests.test_dashboards_api:Module]
# @TIER: STANDARD # @TIER: STANDARD
# @PURPOSE: Contract-driven tests for Dashboard Hub API # @PURPOSE: Contract-driven tests for Dashboard Hub API
# @LAYER: Domain (Tests)
# @SEMANTICS: tests, dashboards, api, contract
# @RELATION: TESTS -> backend.src.api.routes.dashboards # @RELATION: TESTS -> backend.src.api.routes.dashboards
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
@@ -48,6 +50,8 @@ def test_get_dashboards_success():
# Validate against Pydantic model # Validate against Pydantic model
DashboardsResponse(**data) DashboardsResponse(**data)
# [/DEF:test_get_dashboards_success:Function]
# [DEF:test_get_dashboards_env_not_found:Function] # [DEF:test_get_dashboards_env_not_found:Function]
# @TEST: GET /api/dashboards returns 404 if env_id missing # @TEST: GET /api/dashboards returns 404 if env_id missing
# @PRE: env_id does not exist # @PRE: env_id does not exist
@@ -64,4 +68,6 @@ def test_get_dashboards_env_not_found():
assert response.status_code == 404 assert response.status_code == 404
assert "Environment not found" in response.json()["detail"] assert "Environment not found" in response.json()["detail"]
# [/DEF:test_get_dashboards_env_not_found:Function]
# [/DEF:backend.tests.test_dashboards_api:Module] # [/DEF:backend.tests.test_dashboards_api:Module]

View File

@@ -44,4 +44,6 @@ async def test_get_dashboards_with_status():
assert result[0]["last_task"]["task_id"] == "task-123" assert result[0]["last_task"]["task_id"] == "task-123"
assert result[0]["last_task"]["status"] == "RUNNING" assert result[0]["last_task"]["status"] == "RUNNING"
# [/DEF:test_get_dashboards_with_status:Function]
# [/DEF:backend.tests.test_resource_service:Module] # [/DEF:backend.tests.test_resource_service:Module]

View File

@@ -165,12 +165,32 @@ export const api = {
getStorageSettings: () => fetchApi('/settings/storage'), getStorageSettings: () => fetchApi('/settings/storage'),
updateStorageSettings: (storage) => requestApi('/settings/storage', 'PUT', storage), updateStorageSettings: (storage) => requestApi('/settings/storage', 'PUT', storage),
getEnvironmentsList: () => fetchApi('/environments'), getEnvironmentsList: () => fetchApi('/environments'),
getEnvironmentDatabases: (id) => fetchApi(`/environments/${id}/databases`),
// Dashboards // Dashboards
getDashboards: (envId) => fetchApi(`/dashboards?env_id=${envId}`), getDashboards: (envId, options = {}) => {
const params = new URLSearchParams({ env_id: envId });
if (options.search) params.append('search', options.search);
if (options.page) params.append('page', options.page);
if (options.page_size) params.append('page_size', options.page_size);
return fetchApi(`/dashboards?${params.toString()}`);
},
getDatabaseMappings: (sourceEnvId, targetEnvId) => fetchApi(`/dashboards/db-mappings?source_env_id=${sourceEnvId}&target_env_id=${targetEnvId}`),
// Datasets // Datasets
getDatasets: (envId) => fetchApi(`/datasets?env_id=${envId}`), getDatasets: (envId, options = {}) => {
const params = new URLSearchParams({ env_id: envId });
if (options.search) params.append('search', options.search);
if (options.page) params.append('page', options.page);
if (options.page_size) params.append('page_size', options.page_size);
return fetchApi(`/datasets?${params.toString()}`);
},
getDatasetIds: (envId, options = {}) => {
const params = new URLSearchParams({ env_id: envId });
if (options.search) params.append('search', options.search);
return fetchApi(`/datasets/ids?${params.toString()}`);
},
getDatasetDetail: (envId, datasetId) => fetchApi(`/datasets/${datasetId}?env_id=${envId}`),
// Settings // Settings
getConsolidatedSettings: () => fetchApi('/settings/consolidated'), getConsolidatedSettings: () => fetchApi('/settings/consolidated'),

View File

@@ -1,3 +1,18 @@
<!-- [DEF:frontend.src.routes.+layout:Module] -->
<!--
@TIER: STANDARD
@SEMANTICS: layout, root, navigation, sidebar, toast
@PURPOSE: Root layout component that provides global UI structure (Sidebar, Navbar, Footer, TaskDrawer, Toasts).
@LAYER: UI (Layout)
@RELATION: DEPENDS_ON -> Sidebar
@RELATION: DEPENDS_ON -> TopNavbar
@RELATION: DEPENDS_ON -> Footer
@RELATION: DEPENDS_ON -> Toast
@RELATION: DEPENDS_ON -> ProtectedRoute
@RELATION: DEPENDS_ON -> TaskDrawer
@INVARIANT: All pages except /login are wrapped in ProtectedRoute.
-->
<!-- [DEF:layout:Module] --> <!-- [DEF:layout:Module] -->
<script> <script>
import '../app.css'; import '../app.css';
@@ -29,14 +44,16 @@
<Sidebar /> <Sidebar />
<!-- Main content area with TopNavbar --> <!-- Main content area with TopNavbar -->
<div class="flex flex-col {isExpanded ? 'ml-60' : 'ml-16'} transition-all duration-200"> <div class="flex flex-col min-h-screen {isExpanded ? 'md:ml-60' : 'md:ml-16'} transition-all duration-200">
<!-- Top Navigation Bar --> <!-- Top Navigation Bar -->
<TopNavbar /> <TopNavbar />
<!-- Breadcrumbs --> <!-- Breadcrumbs -->
<Breadcrumbs /> <div class="mt-16">
<Breadcrumbs />
</div>
<!-- Page content --> <!-- Page content -->
<div class="p-4 pt-20"> <div class="p-4 flex-grow">
<slot /> <slot />
</div> </div>
@@ -50,3 +67,4 @@
{/if} {/if}
</main> </main>
<!-- [/DEF:layout:Module] --> <!-- [/DEF:layout:Module] -->
<!-- [/DEF:frontend.src.routes.+layout:Module] -->

View File

@@ -10,7 +10,11 @@
* @UX_STATE: Loading -> Shows skeleton loader * @UX_STATE: Loading -> Shows skeleton loader
* @UX_STATE: Loaded -> Shows dataset grid with mapping progress * @UX_STATE: Loaded -> Shows dataset grid with mapping progress
* @UX_STATE: Error -> Shows error banner with retry button * @UX_STATE: Error -> Shows error banner with retry button
* @UX_STATE: Selecting -> Checkboxes checked, floating action panel appears
* @UX_STATE: BulkAction-Modal -> Map Columns or Generate Docs modal open
* @UX_FEEDBACK: Clicking task status opens Task Drawer * @UX_FEEDBACK: Clicking task status opens Task Drawer
* @UX_FEEDBACK: Mapped % column shows progress bar + percentage text
* @UX_FEEDBACK: Floating panel slides up from bottom when items selected
* @UX_RECOVERY: Refresh button reloads dataset list * @UX_RECOVERY: Refresh button reloads dataset list
*/ */
@@ -19,6 +23,7 @@
import { t } from '$lib/i18n'; import { t } from '$lib/i18n';
import { openDrawerForTask } from '$lib/stores/taskDrawer.js'; import { openDrawerForTask } from '$lib/stores/taskDrawer.js';
import { api } from '$lib/api.js'; import { api } from '$lib/api.js';
import { debounce } from '$lib/utils/debounce.js';
// State // State
let selectedEnv = null; let selectedEnv = null;
@@ -26,6 +31,39 @@
let isLoading = true; let isLoading = true;
let error = null; let error = null;
// Pagination state
let currentPage = 1;
let pageSize = 10;
let totalPages = 1;
let total = 0;
// Selection state
let selectedIds = new Set();
let isAllSelected = false;
let isAllVisibleSelected = false;
// Search state
let searchQuery = '';
// Bulk action modal state
let showMapColumnsModal = false;
let showGenerateDocsModal = false;
let mapSourceType = 'postgresql';
let mapConnectionId = '';
let mapFileData = null;
let mapFileInput;
let llmProvider = '';
let llmOptions = {};
// Environment options - will be loaded from API
let environments = [];
// Debounced search function
const debouncedSearch = debounce((query) => {
searchQuery = query;
loadDatasets();
}, 300);
// Load environments and datasets on mount // Load environments and datasets on mount
onMount(async () => { onMount(async () => {
await loadEnvironments(); await loadEnvironments();
@@ -59,7 +97,21 @@
isLoading = true; isLoading = true;
error = null; error = null;
try { try {
const response = await api.getDatasets(selectedEnv); const response = await api.getDatasets(selectedEnv, {
search: searchQuery || undefined,
page: currentPage,
page_size: pageSize
});
// Preserve selected IDs across pagination
const newSelectedIds = new Set();
response.datasets.forEach(d => {
if (selectedIds.has(d.id)) {
newSelectedIds.add(d.id);
}
});
selectedIds = newSelectedIds;
datasets = response.datasets.map(d => ({ datasets = response.datasets.map(d => ({
id: d.id, id: d.id,
table_name: d.table_name, table_name: d.table_name,
@@ -75,6 +127,13 @@
} : null, } : null,
actions: ['map_columns'] // All datasets have map columns option actions: ['map_columns'] // All datasets have map columns option
})); }));
// Update pagination state
total = response.total;
totalPages = response.total_pages;
// Update selection state
updateSelectionState();
} catch (err) { } catch (err) {
error = err.message || 'Failed to load datasets'; error = err.message || 'Failed to load datasets';
console.error('[DatasetHub][Coherence:Failed]', err); console.error('[DatasetHub][Coherence:Failed]', err);
@@ -86,15 +145,182 @@
// Handle environment change // Handle environment change
function handleEnvChange(event) { function handleEnvChange(event) {
selectedEnv = event.target.value; selectedEnv = event.target.value;
currentPage = 1;
selectedIds.clear();
loadDatasets(); loadDatasets();
} }
// Handle search input
function handleSearch(event) {
debouncedSearch(event.target.value);
}
// Handle page change
function handlePageChange(page) {
currentPage = page;
loadDatasets();
}
// Handle page size change
function handlePageSizeChange(event) {
pageSize = parseInt(event.target.value);
currentPage = 1;
loadDatasets();
}
// Update selection state based on current selection
function updateSelectionState() {
const visibleCount = datasets.length;
const totalCount = total;
isAllSelected = selectedIds.size === totalCount && totalCount > 0;
isAllVisibleSelected = selectedIds.size === visibleCount && visibleCount > 0;
}
// Handle checkbox change for individual dataset
function handleCheckboxChange(dataset, event) {
if (event.target.checked) {
selectedIds.add(dataset.id);
} else {
selectedIds.delete(dataset.id);
}
selectedIds = selectedIds; // Trigger reactivity
updateSelectionState();
}
// Handle select all
async function handleSelectAll() {
if (isAllSelected) {
selectedIds.clear();
} else {
// Get all dataset IDs from API (including non-visible ones)
try {
const response = await api.getDatasetIds(selectedEnv, {
search: searchQuery || undefined
});
response.dataset_ids.forEach(id => selectedIds.add(id));
} catch (err) {
console.error('[DatasetHub][Coherence:Failed] Failed to fetch all dataset IDs:', err);
// Fallback to selecting visible datasets if API fails
datasets.forEach(d => selectedIds.add(d.id));
}
}
selectedIds = selectedIds; // Trigger reactivity
updateSelectionState();
}
// Handle select visible
function handleSelectVisible() {
if (isAllVisibleSelected) {
datasets.forEach(d => selectedIds.delete(d.id));
} else {
datasets.forEach(d => selectedIds.add(d.id));
}
selectedIds = selectedIds; // Trigger reactivity
updateSelectionState();
}
// Handle action click // Handle action click
function handleAction(dataset, action) { function handleAction(dataset, action) {
console.log(`[DatasetHub][Action] ${action} on dataset ${dataset.table_name}`); console.log(`[DatasetHub][Action] ${action} on dataset ${dataset.table_name}`);
if (action === 'map_columns') { if (action === 'map_columns') {
// Navigate to mapping interface // Show map columns modal
goto(`/mapper?dataset_id=${dataset.id}`); showMapColumnsModal = true;
mapSourceType = 'postgresql';
mapConnectionId = null;
mapFileData = null;
} else if (action === 'generate_docs') {
// Show generate docs modal
showGenerateDocsModal = true;
llmProvider = '';
llmOptions = {};
}
}
// Handle bulk map columns
async function handleBulkMapColumns() {
console.log('[DatasetHub][handleBulkMapColumns][Entry]', {
selectedIds: Array.from(selectedIds),
mapSourceType,
mapConnectionId,
mapFileData
});
if (selectedIds.size === 0) {
console.log('[DatasetHub][handleBulkMapColumns] No datasets selected');
return;
}
if (mapSourceType === 'postgresql' && !mapConnectionId) {
console.log('[DatasetHub][handleBulkMapColumns] No connection ID provided for PostgreSQL');
return;
}
if (mapSourceType === 'xlsx' && (!mapFileData || mapFileData.length === 0)) {
console.log('[DatasetHub][handleBulkMapColumns] No file selected for XLSX');
return;
}
try {
let fileData = null;
if (mapSourceType === 'xlsx' && mapFileData && mapFileData.length > 0) {
// For now we send the filename as a placeholder or handle upload if needed.
// The backend expects a string 'file_data' in the current schema.
fileData = mapFileData[0].name;
}
const response = await api.postApi('/datasets/map-columns', {
env_id: selectedEnv,
dataset_ids: Array.from(selectedIds),
source_type: mapSourceType,
connection_id: mapConnectionId || undefined,
file_data: fileData || undefined
});
console.log('[DatasetHub][Action] Bulk map columns task created:', response.task_id);
// Close modal and open task drawer
showMapColumnsModal = false;
selectedIds.clear();
updateSelectionState();
if (response.task_id) {
openDrawerForTask(response.task_id);
}
} catch (err) {
console.error('[DatasetHub][Coherence:Failed]', err);
alert('Failed to create mapping task');
}
}
// Handle bulk generate docs
async function handleBulkGenerateDocs() {
if (selectedIds.size === 0) return;
if (!llmProvider) {
alert('Please select an LLM provider');
return;
}
try {
const response = await api.postApi('/datasets/generate-docs', {
env_id: selectedEnv,
dataset_ids: Array.from(selectedIds),
llm_provider: llmProvider,
options: llmOptions
});
console.log('[DatasetHub][Action] Bulk generate docs task created:', response.task_id);
// Close modal and open task drawer
showGenerateDocsModal = false;
selectedIds.clear();
updateSelectionState();
if (response.task_id) {
openDrawerForTask(response.task_id);
}
} catch (err) {
console.error('[DatasetHub][Coherence:Failed]', err);
alert('Failed to create documentation generation task');
} }
} }
@@ -111,7 +337,7 @@
if (!status) return ''; if (!status) return '';
switch (status.toLowerCase()) { switch (status.toLowerCase()) {
case 'running': case 'running':
return '<svg class="animate-spin" width="16" height="16" viewBox="0 0 24 24"><path fill="currentColor" d="M12 2a10 10 0 1 0 10 10A10 10 0 0 0 12 2zm0 18a8 8 0 1 1 8-8 8 8 0 0 1-8 8z"/></svg>'; return '<svg class="animate-spin" width="16" height="16" viewBox="0 0 24 24"><path fill="currentColor" d="M12 2a10 10 0 1 0 10 10A10 10 0 0 0 12 2zm0 18a8 8 0 1 1 8-8 8 0 0 1-8 8z"/></svg>';
case 'success': case 'success':
return '<svg width="16" height="16" viewBox="0 0 24 24" fill="currentColor"><path d="M9 16.17L4.83 12l-1.42 1.41L9 19 21 7l-1.41-1.41L9 16.17z"/></svg>'; return '<svg width="16" height="16" viewBox="0 0 24 24" fill="currentColor"><path d="M9 16.17L4.83 12l-1.42 1.41L9 19 21 7l-1.41-1.41L9 16.17z"/></svg>';
case 'error': case 'error':
@@ -162,6 +388,10 @@
@apply px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition-colors; @apply px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 transition-colors;
} }
.search-input {
@apply px-4 py-2 border border-gray-300 rounded-lg bg-white focus:outline-none focus:ring-2 focus:ring-blue-500;
}
.error-banner { .error-banner {
@apply bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded mb-4 flex items-center justify-between; @apply bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded mb-4 flex items-center justify-between;
} }
@@ -170,6 +400,14 @@
@apply px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors; @apply px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors;
} }
.toolbar {
@apply flex items-center justify-between mb-4 gap-4;
}
.selection-buttons {
@apply flex items-center gap-2;
}
.dataset-grid { .dataset-grid {
@apply bg-white border border-gray-200 rounded-lg overflow-hidden; @apply bg-white border border-gray-200 rounded-lg overflow-hidden;
} }
@@ -186,6 +424,10 @@
@apply border-b-0; @apply border-b-0;
} }
.col-checkbox {
@apply col-span-1;
}
.col-table-name { .col-table-name {
@apply col-span-3 font-medium text-gray-900; @apply col-span-3 font-medium text-gray-900;
} }
@@ -203,7 +445,7 @@
} }
.col-actions { .col-actions {
@apply col-span-2; @apply col-span-1;
} }
.mapping-progress { .mapping-progress {
@@ -233,6 +475,58 @@
.skeleton { .skeleton {
@apply animate-pulse bg-gray-200 rounded; @apply animate-pulse bg-gray-200 rounded;
} }
.floating-panel {
@apply fixed bottom-0 left-0 right-0 bg-white border-t border-gray-200 shadow-lg p-4 transition-transform transform translate-y-full;
}
.floating-panel.visible {
@apply transform translate-y-0;
}
.modal-overlay {
@apply fixed inset-0 bg-black bg-opacity-50 flex items-center justify-center z-50;
}
.modal {
@apply bg-white rounded-lg shadow-xl max-w-2xl w-full mx-4 max-h-[80vh] overflow-y-auto;
}
.modal-header {
@apply px-6 py-4 border-b border-gray-200 flex items-center justify-between relative;
}
.close-modal-btn {
@apply absolute top-4 right-4 p-2 text-gray-400 hover:text-gray-600 hover:bg-gray-100 rounded-full transition-all;
}
.modal-body {
@apply px-6 py-4;
}
.modal-footer {
@apply px-6 py-4 border-t border-gray-200 flex justify-end gap-3;
}
.pagination {
@apply flex items-center justify-between px-4 py-3 bg-gray-50 border-t border-gray-200;
}
.pagination-info {
@apply text-sm text-gray-600;
}
.pagination-controls {
@apply flex items-center gap-2;
}
.page-btn {
@apply px-3 py-1 border border-gray-300 rounded hover:bg-gray-100 disabled:opacity-50 disabled:cursor-not-allowed;
}
.page-btn.active {
@apply bg-blue-600 text-white border-blue-600;
}
</style> </style>
<div class="container"> <div class="container">
@@ -265,6 +559,7 @@
{#if isLoading} {#if isLoading}
<div class="dataset-grid"> <div class="dataset-grid">
<div class="grid-header"> <div class="grid-header">
<div class="col-checkbox skeleton h-4"></div>
<div class="col-table-name skeleton h-4"></div> <div class="col-table-name skeleton h-4"></div>
<div class="col-schema skeleton h-4"></div> <div class="col-schema skeleton h-4"></div>
<div class="col-mapping skeleton h-4"></div> <div class="col-mapping skeleton h-4"></div>
@@ -273,6 +568,7 @@
</div> </div>
{#each Array(5) as _} {#each Array(5) as _}
<div class="grid-row"> <div class="grid-row">
<div class="col-checkbox skeleton h-4"></div>
<div class="col-table-name skeleton h-4"></div> <div class="col-table-name skeleton h-4"></div>
<div class="col-schema skeleton h-4"></div> <div class="col-schema skeleton h-4"></div>
<div class="col-mapping skeleton h-4"></div> <div class="col-mapping skeleton h-4"></div>
@@ -290,10 +586,45 @@
<p>{$t.datasets?.empty || 'No datasets found'}</p> <p>{$t.datasets?.empty || 'No datasets found'}</p>
</div> </div>
{:else} {:else}
<!-- Toolbar -->
<div class="toolbar">
<div class="selection-buttons">
<button
class="action-btn"
on:click={handleSelectAll}
disabled={total === 0}
>
{isAllSelected ? 'Deselect All' : 'Select All'}
</button>
<button
class="action-btn"
on:click={handleSelectVisible}
disabled={datasets.length === 0}
>
{isAllVisibleSelected ? 'Deselect Visible' : 'Select Visible'}
</button>
{#if selectedIds.size > 0}
<span class="text-sm text-gray-600">
{selectedIds.size} selected
</span>
{/if}
</div>
<div>
<input
type="text"
class="search-input"
placeholder="Search datasets..."
on:input={handleSearch}
value={searchQuery}
/>
</div>
</div>
<!-- Dataset Grid --> <!-- Dataset Grid -->
<div class="dataset-grid"> <div class="dataset-grid">
<!-- Grid Header --> <!-- Grid Header -->
<div class="grid-header"> <div class="grid-header">
<div class="col-checkbox"></div>
<div class="col-table-name">{$t.datasets?.table_name || 'Table Name'}</div> <div class="col-table-name">{$t.datasets?.table_name || 'Table Name'}</div>
<div class="col-schema">{$t.datasets?.schema || 'Schema'}</div> <div class="col-schema">{$t.datasets?.schema || 'Schema'}</div>
<div class="col-mapping">{$t.datasets?.mapped_fields || 'Mapped Fields'}</div> <div class="col-mapping">{$t.datasets?.mapped_fields || 'Mapped Fields'}</div>
@@ -304,9 +635,23 @@
<!-- Grid Rows --> <!-- Grid Rows -->
{#each datasets as dataset} {#each datasets as dataset}
<div class="grid-row"> <div class="grid-row">
<!-- Checkbox -->
<div class="col-checkbox">
<input
type="checkbox"
checked={selectedIds.has(dataset.id)}
on:change={(e) => handleCheckboxChange(dataset, e)}
/>
</div>
<!-- Table Name --> <!-- Table Name -->
<div class="col-table-name"> <div class="col-table-name">
{dataset.table_name} <a
href={`/datasets/${dataset.id}?env_id=${selectedEnv}`}
class="text-blue-600 hover:text-blue-800 hover:underline"
>
{dataset.table_name}
</a>
</div> </div>
<!-- Schema --> <!-- Schema -->
@@ -355,21 +700,243 @@
<!-- Actions --> <!-- Actions -->
<div class="col-actions"> <div class="col-actions">
<div class="flex space-x-2"> {#if dataset.actions.includes('map_columns')}
{#if dataset.actions.includes('map_columns')} <button
<button class="action-btn primary"
class="action-btn primary" on:click={() => handleAction(dataset, 'map_columns')}
on:click={() => handleAction(dataset, 'map_columns')} aria-label={$t.datasets?.action_map_columns || 'Map Columns'}
aria-label={$t.datasets?.action_map_columns || 'Map Columns'} >
> {$t.datasets?.action_map_columns || 'Map Columns'}
{$t.datasets?.action_map_columns || 'Map Columns'} </button>
</button> {/if}
{/if}
</div>
</div> </div>
</div> </div>
{/each} {/each}
</div> </div>
<!-- Pagination -->
{#if totalPages > 1}
<div class="pagination">
<div class="pagination-info">
Showing {((currentPage - 1) * pageSize) + 1}-{Math.min(currentPage * pageSize, total)} of {total}
</div>
<div class="pagination-controls">
<button
class="page-btn"
on:click={() => handlePageChange(1)}
disabled={currentPage === 1}
>
First
</button>
<button
class="page-btn"
on:click={() => handlePageChange(currentPage - 1)}
disabled={currentPage === 1}
>
Previous
</button>
{#each Array.from({length: totalPages}, (_, i) => i + 1) as pageNum}
<button
class="page-btn {pageNum === currentPage ? 'active' : ''}"
on:click={() => handlePageChange(pageNum)}
>
{pageNum}
</button>
{/each}
<button
class="page-btn"
on:click={() => handlePageChange(currentPage + 1)}
disabled={currentPage === totalPages}
>
Next
</button>
<button
class="page-btn"
on:click={() => handlePageChange(totalPages)}
disabled={currentPage === totalPages}
>
Last
</button>
</div>
<div>
<select
class="env-dropdown"
value={pageSize}
on:change={handlePageSizeChange}
>
<option value={5}>5 per page</option>
<option value={10}>10 per page</option>
<option value={25}>25 per page</option>
<option value={50}>50 per page</option>
<option value={100}>100 per page</option>
</select>
</div>
</div>
{/if}
<!-- Floating Bulk Action Panel -->
{#if selectedIds.size > 0}
<div class="floating-panel visible">
<div class="flex items-center justify-between max-w-7xl mx-auto">
<div class="flex items-center gap-4">
<span class="font-medium">
{selectedIds.size} selected
</span>
</div>
<div class="flex gap-3">
<button
class="action-btn primary"
on:click={() => showMapColumnsModal = true}
>
Map Columns
</button>
<button
class="action-btn primary"
on:click={() => showGenerateDocsModal = true}
>
Generate Docs
</button>
<button
class="action-btn"
on:click={() => selectedIds.clear()}
>
Cancel
</button>
</div>
</div>
</div>
{/if}
{/if}
<!-- Map Columns Modal -->
{#if showMapColumnsModal}
<div class="modal-overlay" on:click={() => showMapColumnsModal = false}>
<div class="modal" on:click|stopPropagation>
<div class="modal-header">
<h2 class="text-xl font-bold">Bulk Column Mapping</h2>
<button on:click={() => showMapColumnsModal = false} class="close-modal-btn" aria-label="Close modal">
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<line x1="18" y1="6" x2="6" y2="18"></line>
<line x1="6" y1="6" x2="18" y2="18"></line>
</svg>
</button>
</div>
<div class="modal-body">
<div class="space-y-4">
<div>
<label class="block text-sm font-medium mb-2">Source Type</label>
<select
class="env-dropdown w-full"
bind:value={mapSourceType}
>
<option value="postgresql">PostgreSQL Comments</option>
<option value="xlsx">XLSX File</option>
</select>
</div>
{#if mapSourceType === 'postgresql'}
<div>
<label class="block text-sm font-medium mb-2">Connection ID</label>
<input
type="text"
class="search-input w-full"
placeholder="Enter connection ID..."
bind:value={mapConnectionId}
/>
</div>
{:else}
<div>
<label class="block text-sm font-medium mb-2">XLSX File</label>
<input
type="file"
class="w-full"
accept=".xlsx,.xls"
bind:files={mapFileData}
bind:this={mapFileInput}
/>
</div>
{/if}
<div>
<label class="block text-sm font-medium mb-2">Selected Datasets</label>
<div class="max-h-40 overflow-y-auto">
{#each Array.from(selectedIds) as id}
{#each datasets as d}
{#if d.id === id}
<div class="text-sm py-1 border-b border-gray-200">{d.table_name}</div>
{/if}
{/each}
{/each}
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button class="action-btn" on:click={() => showMapColumnsModal = false}>Cancel</button>
<button
type="button"
class="action-btn primary"
on:click|preventDefault={handleBulkMapColumns}
disabled={selectedIds.size === 0 || (mapSourceType === 'postgresql' && !mapConnectionId) || (mapSourceType === 'xlsx' && (!mapFileData || mapFileData.length === 0))}
>
Start Mapping
</button>
</div>
</div>
</div>
{/if}
<!-- Generate Docs Modal -->
{#if showGenerateDocsModal}
<div class="modal-overlay" on:click={() => showGenerateDocsModal = false}>
<div class="modal" on:click|stopPropagation>
<div class="modal-header">
<h2 class="text-xl font-bold">Bulk Documentation Generation</h2>
<button on:click={() => showGenerateDocsModal = false} class="close-modal-btn" aria-label="Close modal">
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<line x1="18" y1="6" x2="6" y2="18"></line>
<line x1="6" y1="6" x2="18" y2="18"></line>
</svg>
</button>
</div>
<div class="modal-body">
<div class="space-y-4">
<div>
<label class="block text-sm font-medium mb-2">LLM Provider</label>
<select
class="env-dropdown w-full"
bind:value={llmProvider}
>
<option value="">Select LLM provider...</option>
<option value="openai">OpenAI</option>
<option value="anthropic">Anthropic</option>
<option value="cohere">Cohere</option>
</select>
</div>
<div>
<label class="block text-sm font-medium mb-2">Selected Datasets</label>
<div class="max-h-40 overflow-y-auto">
{#each Array.from(selectedIds) as id}
{#each datasets as d}
{#if d.id === id}
<div class="text-sm py-1 border-b border-gray-200">{d.table_name}</div>
{/if}
{/each}
{/each}
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button class="action-btn" on:click={() => showGenerateDocsModal = false}>Cancel</button>
<button
class="action-btn primary"
on:click={handleBulkGenerateDocs}
disabled={!llmProvider || selectedIds.size === 0}
>
Generate Documentation
</button>
</div>
</div>
</div>
{/if} {/if}
</div> </div>

View File

@@ -0,0 +1,418 @@
<!-- [DEF:DatasetDetail:Page] -->
<script>
/**
* @TIER: CRITICAL
* @PURPOSE: Dataset Detail View - Shows detailed dataset information with columns, SQL, and linked dashboards
* @LAYER: UI
* @RELATION: BINDS_TO -> sidebarStore
* @INVARIANT: Always shows dataset details when loaded
*
* @UX_STATE: Loading -> Shows skeleton loader
* @UX_STATE: Loaded -> Shows dataset details with columns and linked dashboards
* @UX_STATE: Error -> Shows error banner with retry button
* @UX_FEEDBACK: Clicking linked dashboard navigates to dashboard detail
* @UX_RECOVERY: Refresh button reloads dataset details
*/
import { onMount } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { t } from '$lib/i18n';
import { api } from '$lib/api.js';
import { openDrawerForTask } from '$lib/stores/taskDrawer.js';
// Get dataset ID from URL params
$: datasetId = $page.params.id;
$: envId = $page.url.searchParams.get('env_id') || '';
// State
let dataset = null;
let isLoading = true;
let error = null;
// Load dataset details on mount
onMount(async () => {
await loadDatasetDetail();
});
// Load dataset details from API
async function loadDatasetDetail() {
if (!datasetId || !envId) {
error = 'Missing dataset ID or environment ID';
isLoading = false;
return;
}
isLoading = true;
error = null;
try {
const response = await api.getDatasetDetail(envId, datasetId);
dataset = response;
} catch (err) {
error = err.message || 'Failed to load dataset details';
console.error('[DatasetDetail][Coherence:Failed]', err);
} finally {
isLoading = false;
}
}
// Navigate to linked dashboard
function navigateToDashboard(dashboardId) {
goto(`/dashboards/${dashboardId}?env_id=${envId}`);
}
// Navigate back to dataset list
function goBack() {
goto(`/dashboards?env_id=${envId}`);
}
// Get column type icon/color
function getColumnTypeClass(type) {
if (!type) return 'text-gray-500';
const lowerType = type.toLowerCase();
if (lowerType.includes('int') || lowerType.includes('float') || lowerType.includes('num')) {
return 'text-blue-600 bg-blue-50';
} else if (lowerType.includes('date') || lowerType.includes('time')) {
return 'text-green-600 bg-green-50';
} else if (lowerType.includes('str') || lowerType.includes('text') || lowerType.includes('char')) {
return 'text-purple-600 bg-purple-50';
} else if (lowerType.includes('bool')) {
return 'text-orange-600 bg-orange-50';
}
return 'text-gray-600 bg-gray-50';
}
// Get mapping progress percentage
function getMappingProgress(column) {
// Placeholder: In real implementation, this would check if column has mapping
return column.description ? 100 : 0;
}
</script>
<style>
.container {
@apply max-w-7xl mx-auto px-4 py-6;
}
.header {
@apply flex items-center justify-between mb-6;
}
.back-btn {
@apply flex items-center gap-2 text-gray-600 hover:text-gray-900 transition-colors;
}
.title {
@apply text-2xl font-bold text-gray-900;
}
.subtitle {
@apply text-sm text-gray-500 mt-1;
}
.detail-grid {
@apply grid grid-cols-1 lg:grid-cols-3 gap-6;
}
.detail-card {
@apply bg-white border border-gray-200 rounded-lg p-6;
}
.card-title {
@apply text-lg font-semibold text-gray-900 mb-4;
}
.info-row {
@apply flex justify-between py-2 border-b border-gray-100 last:border-0;
}
.info-label {
@apply text-sm text-gray-500;
}
.info-value {
@apply text-sm font-medium text-gray-900;
}
.columns-section {
@apply lg:col-span-2;
}
.columns-grid {
@apply grid grid-cols-1 md:grid-cols-2 gap-3;
}
.column-item {
@apply p-3 border border-gray-200 rounded-lg hover:border-blue-300 transition-colors;
}
.column-header {
@apply flex items-center justify-between mb-2;
}
.column-name {
@apply font-medium text-gray-900;
}
.column-type {
@apply text-xs px-2 py-1 rounded;
}
.column-meta {
@apply flex items-center gap-2 text-xs text-gray-500;
}
.column-description {
@apply text-sm text-gray-600 mt-2;
}
.mapping-badge {
@apply inline-flex items-center px-2 py-0.5 text-xs rounded-full;
}
.mapping-badge.mapped {
@apply bg-green-100 text-green-800;
}
.mapping-badge.unmapped {
@apply bg-gray-100 text-gray-600;
}
.linked-dashboards-list {
@apply space-y-2;
}
.linked-dashboard-item {
@apply flex items-center gap-3 p-3 border border-gray-200 rounded-lg hover:bg-gray-50 cursor-pointer transition-colors;
}
.dashboard-icon {
@apply w-8 h-8 bg-blue-100 rounded-lg flex items-center justify-center text-blue-600;
}
.dashboard-info {
@apply flex-1;
}
.dashboard-title {
@apply font-medium text-gray-900;
}
.dashboard-id {
@apply text-xs text-gray-500;
}
.sql-section {
@apply mt-6;
}
.sql-code {
@apply bg-gray-900 text-gray-100 p-4 rounded-lg overflow-x-auto text-sm font-mono;
}
.empty-state {
@apply py-8 text-center text-gray-500;
}
.skeleton {
@apply animate-pulse bg-gray-200 rounded;
}
.error-banner {
@apply bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded mb-4 flex items-center justify-between;
}
.retry-btn {
@apply px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors;
}
</style>
<div class="container">
<!-- Header -->
<div class="header">
<div>
<button class="back-btn" on:click={goBack}>
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M19 12H5M12 19l-7-7 7-7"/>
</svg>
{$t.common?.back || 'Back to Datasets'}
</button>
{#if dataset}
<h1 class="title mt-4">{dataset.table_name}</h1>
<p class="subtitle">{dataset.schema}{dataset.database}</p>
{:else if !isLoading}
<h1 class="title mt-4">{$t.datasets?.detail_title || 'Dataset Details'}</h1>
{/if}
</div>
<button class="retry-btn" on:click={loadDatasetDetail}>
{$t.common?.refresh || 'Refresh'}
</button>
</div>
<!-- Error Banner -->
{#if error}
<div class="error-banner">
<span>{error}</span>
<button class="retry-btn" on:click={loadDatasetDetail}>
{$t.common?.retry || 'Retry'}
</button>
</div>
{/if}
<!-- Loading State -->
{#if isLoading}
<div class="detail-grid">
<div class="detail-card">
<div class="skeleton h-6 w-1/2 mb-4"></div>
{#each Array(5) as _}
<div class="info-row">
<div class="skeleton h-4 w-20"></div>
<div class="skeleton h-4 w-32"></div>
</div>
{/each}
</div>
<div class="detail-card columns-section">
<div class="skeleton h-6 w-1/3 mb-4"></div>
<div class="columns-grid">
{#each Array(4) as _}
<div class="column-item">
<div class="skeleton h-4 w-full mb-2"></div>
<div class="skeleton h-3 w-16"></div>
</div>
{/each}
</div>
</div>
</div>
{:else if dataset}
<div class="detail-grid">
<!-- Dataset Info Card -->
<div class="detail-card">
<h2 class="card-title">{$t.datasets?.info || 'Dataset Information'}</h2>
<div class="info-row">
<span class="info-label">{$t.datasets?.table_name || 'Table Name'}</span>
<span class="info-value">{dataset.table_name}</span>
</div>
<div class="info-row">
<span class="info-label">{$t.datasets?.schema || 'Schema'}</span>
<span class="info-value">{dataset.schema || '-'}</span>
</div>
<div class="info-row">
<span class="info-label">{$t.datasets?.database || 'Database'}</span>
<span class="info-value">{dataset.database}</span>
</div>
<div class="info-row">
<span class="info-label">{$t.datasets?.columns_count || 'Columns'}</span>
<span class="info-value">{dataset.column_count}</span>
</div>
<div class="info-row">
<span class="info-label">{$t.datasets?.linked_dashboards || 'Linked Dashboards'}</span>
<span class="info-value">{dataset.linked_dashboard_count}</span>
</div>
{#if dataset.is_sqllab_view}
<div class="info-row">
<span class="info-label">{$t.datasets?.type || 'Type'}</span>
<span class="info-value">SQL Lab View</span>
</div>
{/if}
{#if dataset.created_on}
<div class="info-row">
<span class="info-label">{$t.datasets?.created || 'Created'}</span>
<span class="info-value">{new Date(dataset.created_on).toLocaleDateString()}</span>
</div>
{/if}
{#if dataset.changed_on}
<div class="info-row">
<span class="info-label">{$t.datasets?.updated || 'Updated'}</span>
<span class="info-value">{new Date(dataset.changed_on).toLocaleDateString()}</span>
</div>
{/if}
</div>
<!-- Linked Dashboards Card -->
{#if dataset.linked_dashboards && dataset.linked_dashboards.length > 0}
<div class="detail-card">
<h2 class="card-title">{$t.datasets?.linked_dashboards || 'Linked Dashboards'} ({dataset.linked_dashboard_count})</h2>
<div class="linked-dashboards-list">
{#each dataset.linked_dashboards as dashboard}
<div
class="linked-dashboard-item"
on:click={() => navigateToDashboard(dashboard.id)}
role="button"
tabindex="0"
>
<div class="dashboard-icon">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<rect x="3" y="3" width="18" height="18" rx="2" ry="2"/>
<line x1="3" y1="9" x2="21" y2="9"/>
<line x1="9" y1="21" x2="9" y2="9"/>
</svg>
</div>
<div class="dashboard-info">
<div class="dashboard-title">{dashboard.title}</div>
<div class="dashboard-id">ID: {dashboard.id}{#if dashboard.slug}{dashboard.slug}{/if}</div>
</div>
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" class="text-gray-400">
<path d="M9 18l6-6-6-6"/>
</svg>
</div>
{/each}
</div>
</div>
{/if}
<!-- Columns Card -->
<div class="detail-card columns-section">
<h2 class="card-title">{$t.datasets?.columns || 'Columns'} ({dataset.column_count})</h2>
{#if dataset.columns && dataset.columns.length > 0}
<div class="columns-grid">
{#each dataset.columns as column}
<div class="column-item">
<div class="column-header">
<span class="column-name">{column.name}</span>
{#if column.type}
<span class="column-type {getColumnTypeClass(column.type)}">{column.type}</span>
{/if}
</div>
<div class="column-meta">
{#if column.is_dttm}
<span class="text-xs text-green-600">📅 Date/Time</span>
{/if}
{#if !column.is_active}
<span class="text-xs text-gray-400">(Inactive)</span>
{/if}
<span class="mapping-badge {column.description ? 'mapped' : 'unmapped'}">
{column.description ? '✓ Mapped' : 'Unmapped'}
</span>
</div>
{#if column.description}
<p class="column-description">{column.description}</p>
{/if}
</div>
{/each}
</div>
{:else}
<div class="empty-state">
{$t.datasets?.no_columns || 'No columns found'}
</div>
{/if}
</div>
<!-- SQL Section (for SQL Lab views) -->
{#if dataset.sql}
<div class="detail-card sql-section lg:col-span-3">
<h2 class="card-title">{$t.datasets?.sql_query || 'SQL Query'}</h2>
<pre class="sql-code">{dataset.sql}</pre>
</div>
{/if}
</div>
{:else}
<div class="empty-state">
<svg class="w-16 h-16 mx-auto mb-4 text-gray-400" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">
<path d="M3 3h18v18H3V3zm16 16V5H5v14h14z"/>
</svg>
<p>{$t.datasets?.not_found || 'Dataset not found'}</p>
</div>
{/if}
</div>
<!-- [/DEF:DatasetDetail:Page] -->

View File

@@ -18,6 +18,7 @@
import { t } from '$lib/i18n'; import { t } from '$lib/i18n';
import { api } from '$lib/api.js'; import { api } from '$lib/api.js';
import { addToast } from '$lib/toasts'; import { addToast } from '$lib/toasts';
import ProviderConfig from '../../components/llm/ProviderConfig.svelte';
// State // State
let activeTab = 'environments'; let activeTab = 'environments';
@@ -25,6 +26,22 @@
let isLoading = true; let isLoading = true;
let error = null; let error = null;
// Environment editing state
let editingEnvId = null;
let isAddingEnv = false;
let newEnv = {
id: '',
name: '',
url: '',
username: '',
password: '',
is_default: false,
backup_schedule: {
enabled: false,
cron_expression: '0 0 * * *'
}
};
// Load settings on mount // Load settings on mount
onMount(async () => { onMount(async () => {
await loadSettings(); await loadSettings();
@@ -57,10 +74,13 @@
: 'text-gray-600 hover:text-gray-800 border-transparent hover:border-gray-300'; : 'text-gray-600 hover:text-gray-800 border-transparent hover:border-gray-300';
} }
// Handle save // Handle global settings save (Logging, Storage)
async function handleSave() { async function handleSave() {
console.log('[SettingsPage][Action] Saving settings'); console.log('[SettingsPage][Action] Saving settings');
try { try {
// In a real app we might want to only send the changed section,
// but updateConsolidatedSettings expects full object or we can use specific endpoints.
// For now we use the consolidated update.
await api.updateConsolidatedSettings(settings); await api.updateConsolidatedSettings(settings);
addToast($t.settings?.save_success || 'Settings saved', 'success'); addToast($t.settings?.save_success || 'Settings saved', 'success');
} catch (err) { } catch (err) {
@@ -68,6 +88,92 @@
addToast($t.settings?.save_failed || 'Failed to save settings', 'error'); addToast($t.settings?.save_failed || 'Failed to save settings', 'error');
} }
} }
// Handle environment actions
async function handleTestEnv(id) {
console.log(`[SettingsPage][Action] Test environment ${id}`);
addToast('Testing connection...', 'info');
try {
const result = await api.testEnvironmentConnection(id);
if (result.status === 'success') {
addToast('Connection successful', 'success');
} else {
addToast(`Connection failed: ${result.message}`, 'error');
}
} catch (err) {
console.error('[SettingsPage][Coherence:Failed] Error testing connection:', err);
addToast('Failed to test connection', 'error');
}
}
function editEnv(env) {
console.log(`[SettingsPage][Action] Edit environment ${env.id}`);
newEnv = JSON.parse(JSON.stringify(env)); // Deep copy
// Ensure backup_schedule exists
if (!newEnv.backup_schedule) {
newEnv.backup_schedule = { enabled: false, cron_expression: '0 0 * * *' };
}
editingEnvId = env.id;
isAddingEnv = false;
}
function resetEnvForm() {
newEnv = {
id: '',
name: '',
url: '',
username: '',
password: '',
is_default: false,
backup_schedule: {
enabled: false,
cron_expression: '0 0 * * *'
}
};
editingEnvId = null;
}
async function handleAddOrUpdateEnv() {
try {
console.log(`[SettingsPage][Action] ${editingEnvId ? 'Updating' : 'Adding'} environment.`);
// Basic validation
if (!newEnv.id || !newEnv.name || !newEnv.url) {
addToast('Please fill in all required fields (ID, Name, URL)', 'error');
return;
}
if (editingEnvId) {
await api.updateEnvironment(editingEnvId, newEnv);
addToast('Environment updated', 'success');
} else {
await api.addEnvironment(newEnv);
addToast('Environment added', 'success');
}
resetEnvForm();
editingEnvId = null;
isAddingEnv = false;
await loadSettings();
} catch (error) {
console.error("[SettingsPage][Coherence:Failed] Failed to save environment:", error);
addToast(error.message || 'Failed to save environment', 'error');
}
}
async function handleDeleteEnv(id) {
if (confirm('Are you sure you want to delete this environment?')) {
console.log(`[SettingsPage][Action] Delete environment ${id}`);
try {
await api.deleteEnvironment(id);
addToast('Environment deleted', 'success');
await loadSettings();
} catch (error) {
console.error("[SettingsPage][Coherence:Failed] Failed to delete environment:", error);
addToast('Failed to delete environment', 'error');
}
}
}
</script> </script>
<style> <style>
@@ -149,6 +255,12 @@
> >
{$t.settings?.environments || 'Environments'} {$t.settings?.environments || 'Environments'}
</button> </button>
<button
class="tab-btn {getTabClass('logging')}"
on:click={() => handleTabChange('logging')}
>
{$t.settings?.logging || 'Logging'}
</button>
<button <button
class="tab-btn {getTabClass('connections')}" class="tab-btn {getTabClass('connections')}"
on:click={() => handleTabChange('connections')} on:click={() => handleTabChange('connections')}
@@ -161,12 +273,6 @@
> >
{$t.settings?.llm || 'LLM'} {$t.settings?.llm || 'LLM'}
</button> </button>
<button
class="tab-btn {getTabClass('logging')}"
on:click={() => handleTabChange('logging')}
>
{$t.settings?.logging || 'Logging'}
</button>
<button <button
class="tab-btn {getTabClass('storage')}" class="tab-btn {getTabClass('storage')}"
on:click={() => handleTabChange('storage')} on:click={() => handleTabChange('storage')}
@@ -184,13 +290,87 @@
<p class="text-gray-600 mb-6"> <p class="text-gray-600 mb-6">
{$t.settings?.env_description || 'Configure Superset environments for dashboards and datasets.'} {$t.settings?.env_description || 'Configure Superset environments for dashboards and datasets.'}
</p> </p>
<div class="flex justify-end mb-6">
<button class="bg-blue-600 text-white px-4 py-2 rounded-lg hover:bg-blue-700"> {#if !editingEnvId && !isAddingEnv}
{$t.settings?.env_add || 'Add Environment'} <div class="flex justify-end mb-6">
</button> <button
</div> class="bg-blue-600 text-white px-4 py-2 rounded-lg hover:bg-blue-700"
on:click={() => { isAddingEnv = true; resetEnvForm(); }}
>
{$t.settings?.env_add || 'Add Environment'}
</button>
</div>
{/if}
{#if editingEnvId || isAddingEnv}
<!-- Add/Edit Environment Form -->
<div class="bg-gray-50 p-6 rounded-lg mb-6 border border-gray-200">
<h3 class="text-lg font-medium mb-4">{editingEnvId ? 'Edit' : 'Add'} Environment</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<label for="env_id" class="block text-sm font-medium text-gray-700">ID</label>
<input
type="text"
id="env_id"
bind:value={newEnv.id}
disabled={!!editingEnvId}
class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2 disabled:bg-gray-100 disabled:text-gray-500"
/>
</div>
<div>
<label for="env_name" class="block text-sm font-medium text-gray-700">Name</label>
<input type="text" id="env_name" bind:value={newEnv.name} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="env_url" class="block text-sm font-medium text-gray-700">URL</label>
<input type="text" id="env_url" bind:value={newEnv.url} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="env_user" class="block text-sm font-medium text-gray-700">Username</label>
<input type="text" id="env_user" bind:value={newEnv.username} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="env_pass" class="block text-sm font-medium text-gray-700">Password</label>
<input type="password" id="env_pass" bind:value={newEnv.password} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div class="flex items-center mt-6">
<input type="checkbox" id="env_default" bind:checked={newEnv.is_default} class="h-4 w-4 text-blue-600 border-gray-300 rounded" />
<label for="env_default" class="ml-2 block text-sm text-gray-900">Default Environment</label>
</div>
</div>
<h3 class="text-lg font-medium mb-4 mt-6">Backup Schedule</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div class="flex items-center">
<input type="checkbox" id="backup_enabled" bind:checked={newEnv.backup_schedule.enabled} class="h-4 w-4 text-blue-600 border-gray-300 rounded" />
<label for="backup_enabled" class="ml-2 block text-sm text-gray-900">Enable Automatic Backups</label>
</div>
<div>
<label for="cron_expression" class="block text-sm font-medium text-gray-700">Cron Expression</label>
<input type="text" id="cron_expression" bind:value={newEnv.backup_schedule.cron_expression} placeholder="0 0 * * *" class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
<p class="text-xs text-gray-500 mt-1">Example: 0 0 * * * (daily at midnight)</p>
</div>
</div>
<div class="mt-6 flex gap-2 justify-end">
<button
on:click={() => { isAddingEnv = false; editingEnvId = null; resetEnvForm(); }}
class="bg-gray-200 text-gray-700 px-4 py-2 rounded hover:bg-gray-300"
>
Cancel
</button>
<button
on:click={handleAddOrUpdateEnv}
class="bg-blue-600 text-white px-4 py-2 rounded hover:bg-blue-700"
>
{editingEnvId ? 'Update' : 'Add'} Environment
</button>
</div>
</div>
{/if}
{#if settings.environments && settings.environments.length > 0} {#if settings.environments && settings.environments.length > 0}
<div class="mt-6"> <div class="mt-6 overflow-x-auto border border-gray-200 rounded-lg">
<table class="min-w-full divide-y divide-gray-200"> <table class="min-w-full divide-y divide-gray-200">
<thead class="bg-gray-50"> <thead class="bg-gray-50">
<tr> <tr>
@@ -207,13 +387,21 @@
<td class="px-6 py-4 whitespace-nowrap">{env.name}</td> <td class="px-6 py-4 whitespace-nowrap">{env.name}</td>
<td class="px-6 py-4 whitespace-nowrap">{env.url}</td> <td class="px-6 py-4 whitespace-nowrap">{env.url}</td>
<td class="px-6 py-4 whitespace-nowrap">{env.username}</td> <td class="px-6 py-4 whitespace-nowrap">{env.username}</td>
<td class="px-6 py-4 whitespace-nowrap">{env.is_default ? 'Yes' : 'No'}</td>
<td class="px-6 py-4 whitespace-nowrap"> <td class="px-6 py-4 whitespace-nowrap">
{#if env.is_default}
<span class="px-2 inline-flex text-xs leading-5 font-semibold rounded-full bg-green-100 text-green-800">
Yes
</span>
{:else}
<span class="text-gray-500">No</span>
{/if}
</td>
<td class="px-6 py-4 whitespace-nowrap text-right text-sm font-medium">
<button class="text-green-600 hover:text-green-900 mr-4" on:click={() => handleTestEnv(env.id)}> <button class="text-green-600 hover:text-green-900 mr-4" on:click={() => handleTestEnv(env.id)}>
{$t.settings?.env_test || "Test"} {$t.settings?.env_test || "Test"}
</button> </button>
<button class="text-indigo-600 hover:text-indigo-900 mr-4" on:click={() => editEnv(env)}> <button class="text-indigo-600 hover:text-indigo-900 mr-4" on:click={() => editEnv(env)}>
{$t.common.edit} {$t.common.edit || "Edit"}
</button> </button>
<button class="text-red-600 hover:text-red-900" on:click={() => handleDeleteEnv(env.id)}> <button class="text-red-600 hover:text-red-900" on:click={() => handleDeleteEnv(env.id)}>
{$t.settings?.env_delete || "Delete"} {$t.settings?.env_delete || "Delete"}
@@ -224,29 +412,13 @@
</tbody> </tbody>
</table> </table>
</div> </div>
{:else if !isAddingEnv}
<div class="mb-4 p-4 bg-yellow-100 border-l-4 border-yellow-500 text-yellow-700">
<p class="font-bold">Warning</p>
<p>No Superset environments configured. You must add at least one environment to perform backups or migrations.</p>
</div>
{/if} {/if}
</div> </div>
{:else if activeTab === 'connections'}
<!-- Connections Tab -->
<div class="text-lg font-medium mb-4">
<h2 class="text-xl font-bold mb-4">{$t.settings?.connections || 'Database Connections'}</h2>
<p class="text-gray-600 mb-6">
{$t.settings?.connections_description || 'Configure database connections for data mapping.'}
</p>
</div>
{:else if activeTab === 'llm'}
<!-- LLM Tab -->
<div class="text-lg font-medium mb-4">
<h2 class="text-xl font-bold mb-4">{$t.settings?.llm || 'LLM Providers'}</h2>
<p class="text-gray-600 mb-6">
{$t.settings?.llm_description || 'Configure LLM providers for dataset documentation.'}
</p>
<div class="flex justify-end mb-6">
<button class="bg-blue-600 text-white px-4 py-2 rounded-lg hover:bg-blue-700">
{$t.llm?.add_provider || 'Add Provider'}
</button>
</div>
</div>
{:else if activeTab === 'logging'} {:else if activeTab === 'logging'}
<!-- Logging Tab --> <!-- Logging Tab -->
<div class="text-lg font-medium mb-4"> <div class="text-lg font-medium mb-4">
@@ -254,6 +426,76 @@
<p class="text-gray-600 mb-6"> <p class="text-gray-600 mb-6">
{$t.settings?.logging_description || 'Configure logging and task log levels.'} {$t.settings?.logging_description || 'Configure logging and task log levels.'}
</p> </p>
<div class="bg-gray-50 p-6 rounded-lg border border-gray-200">
<div class="grid grid-cols-1 md:grid-cols-2 gap-4">
<div>
<label for="log_level" class="block text-sm font-medium text-gray-700">Log Level</label>
<select id="log_level" bind:value={settings.logging.level} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
<option value="CRITICAL">CRITICAL</option>
</select>
</div>
<div>
<label for="task_log_level" class="block text-sm font-medium text-gray-700">Task Log Level</label>
<select id="task_log_level" bind:value={settings.logging.task_log_level} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
</select>
</div>
<div class="md:col-span-2">
<label class="flex items-center">
<input type="checkbox" id="enable_belief_state" bind:checked={settings.logging.enable_belief_state} class="h-4 w-4 text-blue-600 border-gray-300 rounded" />
<span class="ml-2 block text-sm text-gray-900">Enable Belief State Logging (Beta)</span>
</label>
<p class="text-xs text-gray-500 mt-1 ml-6">Logs agent reasoning and internal state changes for debugging.</p>
</div>
</div>
<div class="mt-6 flex justify-end">
<button
on:click={handleSave}
class="bg-blue-600 text-white px-4 py-2 rounded hover:bg-blue-700"
>
Save Logging Config
</button>
</div>
</div>
</div>
{:else if activeTab === 'connections'}
<!-- Connections Tab -->
<div class="text-lg font-medium mb-4">
<h2 class="text-xl font-bold mb-4">{$t.settings?.connections || 'Database Connections'}</h2>
<p class="text-gray-600 mb-6">
{$t.settings?.connections_description || 'Configure database connections for data mapping.'}
</p>
{#if settings.connections && settings.connections.length > 0}
<!-- Connections list would go here -->
<p class="text-gray-500 italic">No additional connections configured. Superset database connections are used by default.</p>
{:else}
<div class="text-center py-8 bg-gray-50 rounded-lg border border-dashed border-gray-300">
<p class="text-gray-500">No external connections configured.</p>
<button class="mt-4 px-4 py-2 border border-blue-600 text-blue-600 rounded hover:bg-blue-50">
Add Connection
</button>
</div>
{/if}
</div>
{:else if activeTab === 'llm'}
<!-- LLM Tab -->
<div class="text-lg font-medium mb-4">
<h2 class="text-xl font-bold mb-4">{$t.settings?.llm || 'LLM Providers'}</h2>
<p class="text-gray-600 mb-6">
{$t.settings?.llm_description || 'Configure LLM providers for dataset documentation.'}
</p>
<ProviderConfig providers={settings.llm_providers || []} onSave={loadSettings} />
</div> </div>
{:else if activeTab === 'storage'} {:else if activeTab === 'storage'}
<!-- Storage Tab --> <!-- Storage Tab -->
@@ -262,9 +504,36 @@
<p class="text-gray-600 mb-6"> <p class="text-gray-600 mb-6">
{$t.settings?.storage_description || 'Configure file storage paths and patterns.'} {$t.settings?.storage_description || 'Configure file storage paths and patterns.'}
</p> </p>
<div class="bg-gray-50 p-6 rounded-lg border border-gray-200">
<div class="grid grid-cols-1 gap-4">
<div>
<label for="storage_path" class="block text-sm font-medium text-gray-700">Root Path</label>
<input type="text" id="storage_path" bind:value={settings.storage.root_path} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="backup_path" class="block text-sm font-medium text-gray-700">Backup Path</label>
<input type="text" id="backup_path" bind:value={settings.storage.backup_path} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
<div>
<label for="repo_path" class="block text-sm font-medium text-gray-700">Repository Path</label>
<input type="text" id="repo_path" bind:value={settings.storage.repo_path} class="mt-1 block w-full border border-gray-300 rounded-md shadow-sm p-2" />
</div>
</div>
<div class="mt-6 flex justify-end">
<button
on:click={() => handleSave()}
class="bg-blue-600 text-white px-4 py-2 rounded hover:bg-blue-700"
>
Save Storage Config
</button>
</div>
</div>
</div> </div>
{/if} {/if}
</div> </div>
{/if}
</div> </div>
<!-- [/DEF:SettingsPage:Page] --> <!-- [/DEF:SettingsPage:Page] -->

View File

@@ -0,0 +1,16 @@
<!-- [DEF:StorageIndexPage:Page] -->
<!--
@TIER: TRIVIAL
@PURPOSE: Redirect to the backups page as the default storage view.
@LAYER: Page
@INVARIANT: Always redirects to /storage/backups.
-->
<script>
import { onMount } from 'svelte';
import { goto } from '$app/navigation';
onMount(() => {
goto('/storage/backups');
});
</script>
<!-- [/DEF:StorageIndexPage:Page] -->

View File

@@ -0,0 +1,35 @@
<!-- [DEF:StorageBackupsPage:Page] -->
<!--
@TIER: STANDARD
@SEMANTICS: backup, page, tools
@PURPOSE: Entry point for the Backup Management interface (moved from /tools/backups).
@LAYER: Page
@RELATION: USES -> BackupManager
@INVARIANT: BackupManager component is always rendered.
-->
<script lang="ts">
/**
* @UX_STATE: Loading -> (via BackupManager) showing spinner.
* @UX_STATE: Idle -> Showing BackupManager interface.
* @UX_FEEDBACK: Toast -> (via BackupManager) success/error notifications.
*/
// [SECTION: IMPORTS]
import { t } from '$lib/i18n';
import { PageHeader } from '$lib/ui';
import BackupManager from '../../../components/backups/BackupManager.svelte';
// [/SECTION]
</script>
<!-- [SECTION: TEMPLATE] -->
<div class="container mx-auto p-4 max-w-6xl">
<PageHeader title={$t.nav?.backups || "Backups"} />
<div class="mt-6">
<BackupManager />
</div>
</div>
<!-- [/SECTION] -->
<!-- [/DEF:StorageBackupsPage:Page] -->

View File

@@ -0,0 +1,110 @@
<!-- [DEF:frontend.src.routes.storage.repos.+page:Module] -->
<!--
@TIER: STANDARD
@SEMANTICS: git, dashboard, management, ui
@PURPOSE: Dashboard management page for Git integration (moved from /git).
@LAYER: UI (Page)
@RELATION: DEPENDS_ON -> DashboardGrid
@RELATION: DEPENDS_ON -> api
@INVARIANT: Dashboard grid is always shown when an environment is selected.
-->
<!-- [DEF:StorageReposPage:Page] -->
<script lang="ts">
/**
* @UX_STATE: Loading -> Showing spinner while fetching environments/dashboards.
* @UX_STATE: Idle -> Showing dashboard grid with actions.
* @UX_FEEDBACK: Toast -> Error messages on fetch failure.
* @UX_RECOVERY: Environment Selection -> Switch environment to retry loading.
*/
import { onMount } from 'svelte';
import DashboardGrid from '../../../components/DashboardGrid.svelte';
import { addToast as toast } from '$lib/toasts.js';
import { api } from '$lib/api.js';
import type { DashboardMetadata } from '$lib/types/dashboard';
import { t } from '$lib/i18n';
import { Button, Card, PageHeader, Select } from '$lib/ui';
let environments: any[] = [];
let selectedEnvId = "";
let dashboards: DashboardMetadata[] = [];
let loading = true;
let fetchingDashboards = false;
// [DEF:fetchEnvironments:Function]
/**
* @PURPOSE: Fetches the list of available environments.
* @PRE: None.
* @POST: environments array is populated, selectedEnvId is set to first env if available.
*/
async function fetchEnvironments() {
try {
environments = await api.getEnvironmentsList();
if (environments.length > 0) {
selectedEnvId = environments[0].id;
}
} catch (e) {
toast(e.message, 'error');
} finally {
loading = false;
}
}
// [/DEF:fetchEnvironments:Function]
// [DEF:fetchDashboards:Function]
/**
* @PURPOSE: Fetches dashboards for a specific environment.
* @PRE: envId is a valid environment ID.
* @POST: dashboards array is populated with metadata for the selected environment.
*/
async function fetchDashboards(envId: string) {
if (!envId) return;
fetchingDashboards = true;
try {
dashboards = await api.requestApi(`/environments/${envId}/dashboards`);
} catch (e) {
toast(e.message, 'error');
dashboards = [];
} finally {
fetchingDashboards = false;
}
}
// [/DEF:fetchDashboards:Function]
onMount(fetchEnvironments);
$: if (selectedEnvId) {
fetchDashboards(selectedEnvId);
localStorage.setItem('selected_env_id', selectedEnvId);
}
</script>
<div class="max-w-6xl mx-auto p-6">
<PageHeader title={$t.nav?.repositories || "Git Repositories"}>
<div slot="actions" class="flex items-center space-x-4">
<Select
label="Environment"
bind:value={selectedEnvId}
options={environments.map(e => ({ value: e.id, label: e.name }))}
/>
</div>
</PageHeader>
{#if loading}
<div class="flex justify-center py-12">
<div class="animate-spin rounded-full h-8 w-8 border-b-2 border-blue-600"></div>
</div>
{:else}
<Card title="Select Dashboard to Manage">
{#if fetchingDashboards}
<p class="text-gray-500">Loading dashboards...</p>
{:else if dashboards.length > 0}
<DashboardGrid {dashboards} />
{:else}
<p class="text-gray-500 italic">No dashboards found in this environment.</p>
{/if}
</Card>
{/if}
</div>
<!-- [/DEF:StorageReposPage:Page] -->
<!-- [/DEF:frontend.src.routes.storage.repos.+page:Module] -->

BIN
frontend/static/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 523 B

View File

@@ -6,6 +6,10 @@ const config = {
preprocess: vitePreprocess(), preprocess: vitePreprocess(),
kit: { kit: {
alias: {
'$components': 'src/components',
'$lib': 'src/lib'
},
adapter: adapter({ adapter: adapter({
pages: 'build', pages: 'build',
assets: 'build', assets: 'build',

View File

@@ -1,298 +0,0 @@
PS H:\dev\ss-tools> & C:/ProgramData/anaconda3/python.exe h:/dev/ss-tools/migration_script.py
2025-12-16 11:50:28,192 - INFO - [run][Entry] Запуск скрипта миграции.
=== Поведение при ошибке импорта ===
Если импорт завершится ошибкой, удалить существующий дашборд и попытаться импортировать заново? (y/n): n
2025-12-16 11:50:33,363 - INFO - [ask_delete_on_failure][State] Delete-on-failure = False
2025-12-16 11:50:33,368 - INFO - [select_environments][Entry] Шаг 1/5: Выбор окружений.
2025-12-16 11:50:33,374 - INFO - [setup_clients][Enter] Starting Superset clients initialization.
2025-12-16 11:50:33,730 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,734 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,739 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,742 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,746 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,750 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,754 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,758 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,761 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,764 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,769 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,772 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,776 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,779 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,782 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,786 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,790 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,794 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,799 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,805 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,808 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,811 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,815 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,820 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,823 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,827 - INFO - [SupersetClient.__init__][Enter] Initializing SupersetClient.
2025-12-16 11:50:33,831 - INFO - [APIClient.__init__][Entry] Initializing APIClient.
2025-12-16 11:50:33,834 - WARNING - [_init_session][State] SSL verification disabled.
2025-12-16 11:50:33,838 - INFO - [APIClient.__init__][Exit] APIClient initialized.
2025-12-16 11:50:33,840 - INFO - [SupersetClient.__init__][Exit] SupersetClient initialized.
2025-12-16 11:50:33,847 - INFO - [setup_clients][Exit] All clients (dev, prod, sbx, preprod, uatta, dev5) initialized successfully.
=== Выбор окружения ===
Исходное окружение:
1) dev
2) prod
3) sbx
4) preprod
5) uatta
6) dev5
Введите номер (0 отмена): 4
2025-12-16 11:50:42,379 - INFO - [select_environments][State] from = preprod
=== Выбор окружения ===
Целевое окружение:
1) dev
2) prod
3) sbx
4) uatta
5) dev5
Введите номер (0 отмена): 5
2025-12-16 11:50:45,176 - INFO - [select_environments][State] to = dev5
2025-12-16 11:50:45,182 - INFO - [select_environments][Exit] Шаг 1 завершён.
2025-12-16 11:50:45,186 - INFO - [select_dashboards][Entry] Шаг 2/5: Выбор дашбордов.
2025-12-16 11:50:45,190 - INFO - [get_dashboards][Enter] Fetching dashboards.
2025-12-16 11:50:45,197 - INFO - [authenticate][Enter] Authenticating to https://preprodta.bi.dwh.rusal.com/api/v1
2025-12-16 11:50:45,880 - INFO - [authenticate][Exit] Authenticated successfully.
2025-12-16 11:50:46,025 - INFO - [get_dashboards][Exit] Found 95 dashboards.
=== Поиск ===
Введите регулярное выражение для поиска дашбордов:
fi
=== Выбор дашбордов ===
Отметьте нужные дашборды (введите номера):
1) [ALL] Все дашборды
2) [185] FI-0060 Финансы. Налоги. Данные по налогам. Старый
3) [184] FI-0083 Статистика по ДЗ/ПДЗ
4) [187] FI-0081 ПДЗ Казначейство
5) [122] FI-0080 Финансы. Оборотный Капитал ДЗ/КЗ
6) [208] FI-0020 Просроченная дебиторская и кредиторская задолженность в динамике
7) [126] FI-0022 Кредиторская задолженность для казначейства
8) [196] FI-0023 Дебиторская задолженность для казначейства
9) [113] FI-0060 Финансы. Налоги. Данные по налогам.
10) [173] FI-0040 Оборотно-сальдовая ведомость (ОСВ) по контрагентам
11) [174] FI-0021 Дебиторская и кредиторская задолженность по документам
12) [172] FI-0030 Дебиторская задолженность по штрафам
13) [170] FI-0050 Налог на прибыль (ОНА и ОНО)
14) [159] FI-0070 Досье контрагента
Введите номера через запятую (пустой ввод → отказ): 2
2025-12-16 11:50:52,235 - INFO - [select_dashboards][State] Выбрано 1 дашбордов.
2025-12-16 11:50:52,242 - INFO - [select_dashboards][Exit] Шаг 2 завершён.
=== Замена БД ===
Заменить конфигурацию БД в YAMLфайлах? (y/n): y
2025-12-16 11:50:53,808 - INFO - [_select_databases][Entry] Selecting databases from both environments.
2025-12-16 11:50:53,816 - INFO - [get_databases][Enter] Fetching databases.
2025-12-16 11:50:53,918 - INFO - [get_databases][Exit] Found 12 databases.
2025-12-16 11:50:53,923 - INFO - [get_databases][Enter] Fetching databases.
2025-12-16 11:50:53,926 - INFO - [authenticate][Enter] Authenticating to https://dev.bi.dwh.rusal.com/api/v1
2025-12-16 11:50:54,450 - INFO - [authenticate][Exit] Authenticated successfully.
2025-12-16 11:50:54,551 - INFO - [get_databases][Exit] Found 4 databases.
=== Выбор исходной БД ===
Выберите исходную БД:
1) DEV datalab (ID: 9)
2) Prod Greenplum (ID: 7)
3) DEV Clickhouse New (OLD) (ID: 16)
4) Preprod Clickhouse New (ID: 15)
5) DEV Greenplum (ID: 1)
6) Prod Clickhouse Node 1 (ID: 11)
7) Preprod Postgre Superset Internal (ID: 5)
8) Prod Postgre Superset Internal (ID: 28)
9) Prod Clickhouse (ID: 10)
10) Dev Clickhouse (correct) (ID: 14)
11) DEV ClickHouse New (ID: 23)
12) Sandbox Postgre Superset Internal (ID: 12)
Введите номер (0 отмена): 9
2025-12-16 11:51:11,008 - INFO - [get_database][Enter] Fetching database 10.
2025-12-16 11:51:11,038 - INFO - [get_database][Exit] Got database 10.
=== Выбор целевой БД ===
Выберите целевую БД:
1) DEV Greenplum (ID: 2)
2) DEV Clickhouse (ID: 3)
3) DEV ClickHouse New (ID: 4)
4) Dev Postgre Superset Internal (ID: 1)
Введите номер (0 отмена): 2
2025-12-16 11:51:15,559 - INFO - [get_database][Enter] Fetching database 3.
2025-12-16 11:51:15,586 - INFO - [get_database][Exit] Got database 3.
2025-12-16 11:51:15,589 - INFO - [_select_databases][Exit] Selected databases: Без имени -> Без имени
old_db: {'id': 10, 'result': {'allow_ctas': False, 'allow_cvas': False, 'allow_dml': True, 'allow_file_upload': False, 'allow_run_async': False, 'backen
d': 'clickhousedb', 'cache_timeout': None, 'configuration_method': 'sqlalchemy_form', 'database_name': 'Prod Clickhouse', 'driver': 'connect', 'engine_i
nformation': {'disable_ssh_tunneling': False, 'supports_file_upload': False}, 'expose_in_sqllab': True, 'force_ctas_schema': None, 'id': 10, 'impersonat
e_user': False, 'is_managed_externally': False, 'uuid': '97aced68-326a-4094-b381-27980560efa9'}}
2025-12-16 11:51:15,591 - INFO - [confirm_db_config_replacement][State] Replacement set: {'old': {'database_name': None, 'uuid': None, 'id': '10'}, 'new
': {'database_name': None, 'uuid': None, 'id': '3'}}
2025-12-16 11:51:15,594 - INFO - [execute_migration][Entry] Starting migration of 1 dashboards.
=== Миграция... ===
Миграция: FI-0060 Финансы. Налоги. Данные по налогам. Старый (1/1) 0%2025-12-16 11:51:15,598 - INFO - [export_dashboard][Enter] Exporting dashboard 185.
2025-12-16 11:51:16,142 - INFO - [export_dashboard][Exit] Exported dashboard 185 to dashboard_export_20251216T085115.zip.
2025-12-16 11:51:16,205 - INFO - [update_yamls][Enter] Starting YAML configuration update.
2025-12-16 11:51:16,208 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\metadata.yaml
2025-12-16 11:51:16,209 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-01_2787.yaml
2025-12-16 11:51:16,210 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_2_4030.yaml
2025-12-16 11:51:16,212 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_4029.yaml
2025-12-16 11:51:16,213 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL2_4036.yaml
2025-12-16 11:51:16,215 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL2_4037.yaml
2025-12-16 11:51:16,216 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_TOTAL_4028.yaml
2025-12-16 11:51:16,217 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_ZNODE_ROOT2_4024.yaml
2025-12-16 11:51:16,218 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-01_ZNODE_ROOT_4033.yaml
2025-12-16 11:51:16,220 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND-BD2_4021.yaml
2025-12-16 11:51:16,221 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND_4027.yaml
2025-12-16 11:51:16,222 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02-02_ZFUND_4034.yaml
2025-12-16 11:51:16,224 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02_ZTAX_4022.yaml
2025-12-16 11:51:16,226 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-02_ZTAX_4035.yaml
2025-12-16 11:51:16,227 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-04-2_4031.yaml
2025-12-16 11:51:16,228 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-05-01_4026.yaml
2025-12-16 11:51:16,230 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-05-01_4032.yaml
2025-12-16 11:51:16,231 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-06_1_4023.yaml
2025-12-16 11:51:16,233 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060-06_2_4020.yaml
2025-12-16 11:51:16,234 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\charts\FI-0060_4025.yaml
2025-12-16 11:51:16,236 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\dashboards\FI-0060_185.yaml
2025-12-16 11:51:16,238 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\databases\Prod_Clickhouse_10.yaml
2025-12-16 11:51:16,240 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0000_-_685.yaml
2025-12-16 11:51:16,241 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-01-2_zfund_reciever_-_861.yaml
2025-12-16 11:51:16,242 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-01_zfund_reciever_click_689.yaml
2025-12-16 11:51:16,244 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-02_680.yaml
2025-12-16 11:51:16,245 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-03_ztax_862.yaml
2025-12-16 11:51:16,246 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-04_zpbe_681.yaml
2025-12-16 11:51:16,247 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-05_ZTAXZFUND_679.yaml
2025-12-16 11:51:16,249 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-06_860.yaml
2025-12-16 11:51:16,250 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-08_682.yaml
2025-12-16 11:51:16,251 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-10_zpbe_688.yaml
2025-12-16 11:51:16,253 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060-11_ZTAX_NAME_863.yaml
2025-12-16 11:51:16,254 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_683.yaml
2025-12-16 11:51:16,255 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_684.yaml
2025-12-16 11:51:16,256 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_686.yaml
2025-12-16 11:51:16,258 - INFO - [_update_yaml_file][State] Replaced '10' with '3' for key id in C:\Users\LO54FB~1\Temp\tmpuidfegpd.dir\dashboard_export
_20251216T085115\datasets\Prod_Clickhouse_10\FI-0060_690.yaml
2025-12-16 11:51:16,259 - INFO - [create_dashboard_export][Enter] Packing dashboard: ['C:\\Users\\LO54FB~1\\Temp\\tmpuidfegpd.dir'] -> C:\Users\LO54FB~1
\Temp\tmps7cuv2ti.zip
2025-12-16 11:51:16,347 - INFO - [create_dashboard_export][Exit] Archive created: C:\Users\LO54FB~1\Temp\tmps7cuv2ti.zip
2025-12-16 11:51:16,372 - ERROR - [import_dashboard][Failure] First import attempt failed: [API_FAILURE] API error during upload: {"errors": [{"message"
: "Expecting value: line 1 column 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "messag
e": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448
\u0438\u0431\u043a\u0430."}]}}]} | Context: {'type': 'api_call'}
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 186, in _perform_upload
response.raise_for_status()
File "C:\ProgramData\anaconda3\Lib\site-packages\requests\models.py", line 1021, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: https://dev.bi.dwh.rusal.com/api/v1/dashboard/import/
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\client.py", line 141, in import_dashboard
return self._do_import(file_path)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\client.py", line 197, in _do_import
return self.network.upload_file(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 172, in upload_file
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 196, in _perform_upload
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
superset_tool.exceptions.SupersetAPIError: [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "
error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437
\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'ty
pe': 'api_call'}
2025-12-16 11:51:16,511 - ERROR - [execute_migration][Failure] [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 c
olumn 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u04
40\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]
}}]} | Context: {'type': 'api_call'}
Traceback (most recent call last):
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 186, in _perform_upload
response.raise_for_status()
File "C:\ProgramData\anaconda3\Lib\site-packages\requests\models.py", line 1021, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 500 Server Error: INTERNAL SERVER ERROR for url: https://dev.bi.dwh.rusal.com/api/v1/dashboard/import/
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "h:\dev\ss-tools\migration_script.py", line 366, in execute_migration
self.to_c.import_dashboard(file_name=tmp_new_zip, dash_id=dash_id, dash_slug=dash_slug)
File "h:\dev\ss-tools\superset_tool\client.py", line 141, in import_dashboard
return self._do_import(file_path)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\client.py", line 197, in _do_import
return self.network.upload_file(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 172, in upload_file
return self._perform_upload(full_url, files_payload, extra_data, _headers, timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "h:\dev\ss-tools\superset_tool\utils\network.py", line 196, in _perform_upload
raise SupersetAPIError(f"API error during upload: {e.response.text}") from e
superset_tool.exceptions.SupersetAPIError: [API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "
error_type": "GENERIC_BACKEND_ERROR", "level": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437
\u043e\u0448\u043b\u0430 \u043d\u0435\u0438\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'ty
pe': 'api_call'}
=== Ошибка ===
Не удалось мигрировать дашборд FI-0060 Финансы. Налоги. Данные по налогам. Старый.
[API_FAILURE] API error during upload: {"errors": [{"message": "Expecting value: line 1 column 1 (char 0)", "error_type": "GENERIC_BACKEND_ERROR", "leve
l": "error", "extra": {"issue_codes": [{"code": 1011, "message": "Issue 1011 - \u041f\u0440\u043e\u0438\u0437\u043e\u0448\u043b\u0430 \u043d\u0435\u0438
\u0437\u0432\u0435\u0441\u0442\u043d\u0430\u044f \u043e\u0448\u0438\u0431\u043a\u0430."}]}}]} | Context: {'type': 'api_call'}
100%
2025-12-16 11:51:16,598 - INFO - [execute_migration][Exit] Migration finished.
=== Информация ===
Миграция завершена!
2025-12-16 11:51:16,605 - INFO - [run][Exit] Скрипт миграции завершён.

View File

@@ -0,0 +1,36 @@
import requests
import json
import sys
# Try to find the port from app.py or common defaults
BASE_URL = "http://127.0.0.1:8000/api"
def test_save_mapping():
payload = {
"source_env_id": "ss1",
"target_env_id": "ss2",
"source_db_uuid": "test-uuid-1",
"target_db_uuid": "test-uuid-2",
"source_db_name": "Test Source DB",
"target_db_name": "Test Target DB"
}
print(f"Sending request to {BASE_URL}/mappings with payload: {json.dumps(payload, indent=2)}")
try:
# Note: We might need authentication headers if has_permission is active
# In a real tool use, we'd need to handle that, but for local testing
# let's see if the server is even running and if we get a 401/403 or something else.
response = requests.post(f"{BASE_URL}/mappings", json=payload)
print(f"Status Code: {response.status_code}")
try:
print(f"Response Body: {json.dumps(response.json(), indent=2)}")
except:
print(f"Raw Response: {response.text}")
except Exception as e:
print(f"Error: {e}")
if __name__ == "__main__":
if len(sys.argv) > 1:
BASE_URL = sys.argv[1]
test_save_mapping()

File diff suppressed because it is too large Load Diff

View File

@@ -353,6 +353,7 @@ export const activityStore = derived(
<!-- @UX_FEEDBACK: Mapped % column shows progress bar + percentage text --> <!-- @UX_FEEDBACK: Mapped % column shows progress bar + percentage text -->
<!-- @UX_FEEDBACK: Tables column shows count of SQL tables extracted --> <!-- @UX_FEEDBACK: Tables column shows count of SQL tables extracted -->
<!-- @UX_FEEDBACK: Columns column shows "X/Y" format (mapped/total) --> <!-- @UX_FEEDBACK: Columns column shows "X/Y" format (mapped/total) -->
<!-- @UX_FEEDBACK: Start Mapping button is disabled until valid source is configured -->
<!-- @UX_RECOVERY: Failed mapping shows error toast with "Retry" action --> <!-- @UX_RECOVERY: Failed mapping shows error toast with "Retry" action -->
<!-- @PRE: User has permission plugin:mapper:execute for Map Columns --> <!-- @PRE: User has permission plugin:mapper:execute for Map Columns -->

View File

@@ -165,13 +165,14 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
- [x] T032 [P] [US3] Create `backend/src/services/resource_service.py` for shared resource fetching logic - [x] T032 [P] [US3] Create `backend/src/services/resource_service.py` for shared resource fetching logic
_Contract: [DEF:ResourceService:Class](./contracts/modules.md#13-resourceservice)_ _Contract: [DEF:ResourceService:Class](./contracts/modules.md#13-resourceservice)_
- [x] T033 [US3] Implement dashboard list fetching with Git status and last task status - [x] T033 [US3] Implement dashboard list fetching with Git status and last task status
- [ ] T034 [US3] Add pagination support to GET /api/dashboards endpoint (page, page_size parameters) - [x] T034 [US3] Add pagination support to GET /api/dashboards endpoint (page, page_size parameters)
_Contract: @POST: Response includes pagination metadata_ _Contract: @POST: Response includes pagination metadata_
- [ ] T035 [US3] Implement bulk migration endpoint POST /api/dashboards/migrate with target environment and dashboard IDs - [x] T035 [US3] Implement bulk migration endpoint POST /api/dashboards/migrate with target environment and dashboard IDs
_Contract: @PRE: User has permission plugin:migration:execute_ _Contract: @PRE: User has permission plugin:migration:execute_
- [ ] T036 [US3] Implement bulk backup endpoint POST /api/dashboards/backup with optional cron schedule - [x] T036 [US3] Implement bulk backup endpoint POST /api/dashboards/backup with optional cron schedule
_Contract: @PRE: User has permission plugin:backup:execute_ _Contract: @PRE: User has permission plugin:backup:execute_
- [ ] T037 [US3] Add database mappings retrieval from MappingService for migration modal - [x] T037 [US3] Add database mappings retrieval from MappingService for migration modal
- [x] T064 [US3] Fix "API endpoint not found" for databases by correcting endpoint path in `frontend/src/lib/api.js`
### Frontend for User Story 3 ### Frontend for User Story 3
@@ -184,17 +185,17 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
- [x] T039 [US3] Implement environment selector dropdown at top of Dashboard Hub - [x] T039 [US3] Implement environment selector dropdown at top of Dashboard Hub
- [x] T040 [US3] Create dashboard grid with checkboxes, columns: Title, Slug, Git Status, Last Task, Actions - [x] T040 [US3] Create dashboard grid with checkboxes, columns: Title, Slug, Git Status, Last Task, Actions
_Contract: @UX_STATE: Idle-Grid, @UX_FEEDBACK: Git status color-coded icons_ _Contract: @UX_STATE: Idle-Grid, @UX_FEEDBACK: Git status color-coded icons_
- [ ] T041 [US3] Implement "Select All" and "Select Visible" buttons in toolbar - [x] T041 [US3] Implement "Select All" and "Select Visible" buttons in toolbar
_Contract: @UX_STATE: Selecting_ _Contract: @UX_STATE: Selecting_
- [ ] T042 [US3] Add real-time search input that filters dashboard list - [x] T042 [US3] Add real-time search input that filters dashboard list
_Contract: @POST: Search filters results in real-time (debounced 300ms)_ _Contract: @POST: Search filters results in real-time (debounced 300ms)_
- [ ] T043 [US3] Implement pagination controls with page numbers and "Rows per page" dropdown - [x] T043 [US3] Implement pagination controls with page numbers and "Rows per page" dropdown
_Contract: @INVARIANT: Selection persists across pagination_ _Contract: @INVARIANT: Selection persists across pagination_
- [ ] T044 [US3] Create floating bulk action panel at bottom: "[✓ N selected] [Migrate] [Backup]" - [x] T044 [US3] Create floating bulk action panel at bottom: "[✓ N selected] [Migrate] [Backup]"
_Contract: @UX_FEEDBACK: Floating panel slides up from bottom_ _Contract: @UX_FEEDBACK: Floating panel slides up from bottom_
- [ ] T045 [US3] Implement Bulk Migration modal with target environment, database mappings, and selected dashboards list - [x] T045 [US3] Implement Bulk Migration modal with target environment, database mappings, and selected dashboards list
_Contract: @UX_STATE: BulkAction-Modal_ _Contract: @UX_STATE: BulkAction-Modal_
- [ ] T046 [US3] Implement Bulk Backup modal with one-time/scheduled options and cron expression - [x] T046 [US3] Implement Bulk Backup modal with one-time/scheduled options and cron expression
- [x] T047 [US3] Implement individual Actions menu with Migrate, Backup, Git Operations options - [x] T047 [US3] Implement individual Actions menu with Migrate, Backup, Git Operations options
- [x] T048 [US3] Connect Actions menu to existing plugin triggers (Migration, Backup, Git) - [x] T048 [US3] Connect Actions menu to existing plugin triggers (Migration, Backup, Git)
_Contract: @RELATION: DISPATCHES -> MigrationPlugin, BackupPlugin_ _Contract: @RELATION: DISPATCHES -> MigrationPlugin, BackupPlugin_
@@ -202,7 +203,7 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
_Contract: @POST: Clicking status badge opens TaskDrawer with that task_ _Contract: @POST: Clicking status badge opens TaskDrawer with that task_
- [x] T050 [US3] Add empty state when no environments configured or no dashboards found - [x] T050 [US3] Add empty state when no environments configured or no dashboards found
_Contract: @UX_STATE: Empty-NoEnv, Empty-NoData_ _Contract: @UX_STATE: Empty-NoEnv, Empty-NoData_
- [ ] T051 [US3] Verify implementation matches ux_reference.md (Dashboard Hub Grid mockup) - [x] T051 [US3] Verify implementation matches ux_reference.md (Dashboard Hub Grid mockup)
**Checkpoint**: Dashboard Hub fully functional with bulk operations **Checkpoint**: Dashboard Hub fully functional with bulk operations
@@ -224,12 +225,12 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
_Contract: [DEF:DatasetsAPI:Module](./contracts/modules.md#11-datasets-api) - CRITICAL_ _Contract: [DEF:DatasetsAPI:Module](./contracts/modules.md#11-datasets-api) - CRITICAL_
- [x] T053 [US4] Implement dataset list fetching with mapped fields count and SQL table extraction - [x] T053 [US4] Implement dataset list fetching with mapped fields count and SQL table extraction
_Contract: @INVARIANT: Mapped % is calculated as (mapped_columns / total_columns) * 100_ _Contract: @INVARIANT: Mapped % is calculated as (mapped_columns / total_columns) * 100_
- [ ] T054 [US4] Add pagination support to GET /api/datasets endpoint (page, page_size parameters) - [x] T054 [US4] Add pagination support to GET /api/datasets endpoint (page, page_size parameters)
- [ ] T055 [US4] Implement bulk column mapping endpoint POST /api/datasets/map-columns with source selection - [x] T055 [US4] Implement bulk column mapping endpoint POST /api/datasets/map-columns with source selection
_Contract: @PRE: User has permission plugin:mapper:execute_ _Contract: @PRE: User has permission plugin:mapper:execute_
- [ ] T056 [US4] Implement bulk documentation generation endpoint POST /api/datasets/generate-docs - [x] T056 [US4] Implement bulk documentation generation endpoint POST /api/datasets/generate-docs
_Contract: @PRE: User has permission plugin:llm_analysis:execute_ _Contract: @PRE: User has permission plugin:llm_analysis:execute_
- [ ] T057 [US4] Add dataset-to-dashboard relationship retrieval for linked dashboards display - [x] T057 [US4] Add dataset-to-dashboard relationship retrieval for linked dashboards display
### Frontend for User Story 4 ### Frontend for User Story 4
@@ -241,22 +242,22 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
_Contract: [DEF:DatasetHub:Component](./contracts/modules.md#8-datasethub-component) - CRITICAL_ _Contract: [DEF:DatasetHub:Component](./contracts/modules.md#8-datasethub-component) - CRITICAL_
- [x] T059 [US4] Implement dataset grid with checkboxes, columns: Name, Database, Schema, Tables, Columns, Mapped %, Updated By, Actions - [x] T059 [US4] Implement dataset grid with checkboxes, columns: Name, Database, Schema, Tables, Columns, Mapped %, Updated By, Actions
_Contract: @UX_FEEDBACK: Mapped % column shows progress bar + percentage text_ _Contract: @UX_FEEDBACK: Mapped % column shows progress bar + percentage text_
- [ ] T060 [US4] Implement "Select All" and "Select Visible" buttons in toolbar - [x] T060 [US4] Implement "Select All" and "Select Visible" buttons in toolbar
_Contract: @UX_STATE: Selecting_ _Contract: @UX_STATE: Selecting_
- [ ] T061 [US4] Add real-time search input that filters dataset list by name, schema, or table names - [x] T061 [US4] Add real-time search input that filters dataset list by name, schema, or table names
_Contract: @POST: Search filters by name, schema, and table names_ _Contract: @POST: Search filters by name, schema, and table names_
- [ ] T062 [US4] Implement pagination controls with page numbers and "Rows per page" dropdown - [x] T062 [US4] Implement pagination controls with page numbers and "Rows per page" dropdown
- [ ] T063 [US4] Create floating bulk action panel at bottom: "[✓ N selected] [Map Columns] [Generate Docs] [Validate]" - [x] T063 [US4] Create floating bulk action panel at bottom: "[✓ N selected] [Map Columns] [Generate Docs] [Validate]"
_Contract: @UX_STATE: Selecting, @UX_FEEDBACK: Floating panel slides up_ _Contract: @UX_STATE: Selecting, @UX_FEEDBACK: Floating panel slides up_
- [ ] T064 [US4] Implement Column Mapping modal with PostgreSQL comments/XLSX source selection and preview - [x] T064 [US4] Implement Column Mapping modal with PostgreSQL comments/XLSX source selection and preview
_Contract: @POST: Map Columns modal shows source selection (PostgreSQL or XLSX)_ _Contract: @POST: Map Columns modal shows source selection (PostgreSQL or XLSX) with validation_
- [ ] T065 [US4] Implement Documentation Generation modal with LLM provider selection and options - [x] T065 [US4] Implement Documentation Generation modal with LLM provider selection and options
_Contract: @POST: Generate Docs modal shows LLM provider selection_ _Contract: @POST: Generate Docs modal shows LLM provider selection_
- [ ] T066 [US4] Create dataset detail view showing SQL tables, column counts, mapping percentages, and linked dashboards - [x] T066 [US4] Create dataset detail view showing SQL tables, column counts, mapping percentages, and linked dashboards
_Contract: @UX_STATE: Detail-View, @POST: Clicking dataset name opens detail view_ _Contract: @UX_STATE: Detail-View, @POST: Clicking dataset name opens detail view_
- [x] T067 [US4] Add empty state when no datasets found - [x] T067 [US4] Add empty state when no datasets found
_Contract: @UX_STATE: Empty-NoData_ _Contract: @UX_STATE: Empty-NoData_
- [ ] T068 [US4] Verify implementation matches ux_reference.md (Dataset Hub Grid mockup) - [x] T068 [US4] Verify implementation matches ux_reference.md (Dataset Hub Grid mockup)
**Checkpoint**: Dataset Hub fully functional with bulk operations **Checkpoint**: Dataset Hub fully functional with bulk operations
@@ -308,6 +309,136 @@ All implementation tasks MUST follow the Design-by-Contract specifications:
--- ---
## UX Compliance Verification Report
**Date**: 2026-02-15
**Verifier**: QA/Tester Mode
**Status**: ✅ PASS - ALL VIOLATIONS RESOLVED
### Critical Violations
#### V001: Missing Breadcrumbs in Layout
- **Contract**: [DEF:Breadcrumbs:Component](./contracts/modules.md#9-breadcrumbs-component) - @UX_STATE: Idle
- **Expected**: Breadcrumbs visible below TopNavbar on all pages
- **Actual**: Breadcrumbs component exists but is NOT rendered in +layout.svelte
- **Evidence**: `frontend/src/routes/+layout.svelte` imports Breadcrumbs but doesn't use it in template
- **Impact**: Users cannot navigate page hierarchy as specified in UX reference
- **Fix**: Add `<Breadcrumbs />` component between TopNavbar and page content slot
- **Status**: ✅ FIXED - Breadcrumbs now rendered in layout
#### V002: TopNavbar Missing Sidebar Responsive Classes
- **Contract**: [DEF:TopNavbar:Component](./contracts/modules.md#5-topnavbar-component) - @INVARIANT: Height is fixed at 64px
- **Expected**: TopNavbar should have `with-sidebar` or `with-collapsed-sidebar` class based on sidebar state
- **Actual**: TopNavbar always uses `mobile` class regardless of screen size
- **Evidence**: `frontend/src/lib/components/layout/TopNavbar.svelte` line 185: `<nav class="navbar mobile">`
- **Impact**: Layout breaks on desktop - navbar doesn't adjust for sidebar width
- **Fix**: Pass sidebar state to TopNavbar and apply correct responsive classes
- **Status**: ✅ FIXED - TopNavbar now subscribes to sidebarStore and applies correct classes
#### V003: Sidebar Missing Collapse Button Position
- **Contract**: [DEF:Sidebar:Component](./contracts/modules.md#4-sidebar-component) - @UX_STATE: Idle-Expanded
- **Expected**: Collapse button should be at bottom of sidebar with "[◀ Collapse]" label
- **Actual**: Toggle button is in header, no collapse button at bottom
- **Evidence**: `frontend/src/lib/components/layout/Sidebar.svelte` lines 192-206 - toggle in header only
- **Impact**: UX doesn't match Superset-style sidebar pattern
- **Fix**: Add collapse button at bottom of sidebar matching ux_reference.md mockup
- **Status**: ✅ FIXED - Collapse button added to sidebar footer with "◀ Collapse" label
#### V007: Sidebar Missing Sub-Category Structure
- **Contract**: [DEF:Sidebar:Component](./contracts/modules.md#4-sidebar-component) - @UX_STATE: Category-Expanded
- **Expected**: Categories should have expandable sub-items (▽ DASHBOARDS → Overview)
- **Actual**: Sidebar has flat category list without sub-items
- **Evidence**: `frontend/src/lib/components/layout/Sidebar.svelte` lines 22-48 - flat structure
- **Impact**: Navigation structure doesn't match Superset-style mockup
- **Fix**: Implement collapsible category sections with sub-items
- **Status**: ✅ FIXED - Added expandable categories with ▽ toggle and sub-items (Overview, All Datasets, Backups, etc.)
#### V004: DashboardHub Missing "Last Task" Badge Color Coding
- **Contract**: [DEF:DashboardHub:Component](./contracts/modules.md#7-dashboardhub-component) - @UX_FEEDBACK: Last task status: badge with color
- **Expected**: Task status badges should be color-coded (green=success, red=error, blue=running)
- **Actual**: Task status text shown but no color-coded badges
- **Evidence**: `frontend/src/routes/dashboards/+page.svelte` lines 633-658 - shows text only
- **Impact**: Users cannot quickly identify task status at a glance
- **Fix**: Add status-badge classes with appropriate colors for each task state
- **Status**: ✅ FIXED - Added color-coded task-status-badge classes (running=blue, success=green, error=red, waiting=yellow)
#### V005: DashboardHub Missing Individual Actions Dropdown
- **Contract**: [DEF:DashboardHub:Component](./contracts/modules.md#7-dashboardhub-component) - @UX_STATE: Idle-Grid
- **Expected**: Actions column should have [...] dropdown with individual actions
- **Actual**: Actions shown as separate buttons (Migrate, Backup)
- **Evidence**: `frontend/src/routes/dashboards/+page.svelte` lines 661-691 - inline buttons instead of dropdown
- **Impact**: UI clutter, doesn't match mockup specification
- **Fix**: Replace inline buttons with dropdown menu for individual actions
- **Status**: ✅ FIXED - Replaced inline buttons with "⋮" dropdown menu
### Medium Violations
#### V006: TopNavbar Search Disabled
- **Contract**: [DEF:TopNavbar:Component](./contracts/modules.md#5-topnavbar-component) - @UX_STATE: Search-Focused
- **Expected**: Search input should be functional (even if placeholder)
- **Actual**: Search input has `disabled` attribute
- **Evidence**: `frontend/src/lib/components/layout/TopNavbar.svelte` line 202: `disabled`
- **Impact**: Search appears broken to users
- **Fix**: Remove disabled attribute or add placeholder functionality
- **Status**: ✅ FIXED - Removed disabled attribute from search input
#### V007: Sidebar Missing Sub-Category Structure
- **Contract**: [DEF:Sidebar:Component](./contracts/modules.md#4-sidebar-component) - @UX_STATE: Category-Expanded
- **Expected**: Categories should have expandable sub-items (▽ DASHBOARDS → Overview)
- **Actual**: Sidebar has flat category list without sub-items
- **Evidence**: `frontend/src/lib/components/layout/Sidebar.svelte` lines 22-48 - flat structure
- **Impact**: Navigation structure doesn't match Superset-style mockup
- **Fix**: Implement collapsible category sections with sub-items
- **Status**: ✅ FIXED - Implemented expandable categories with ▽ toggle and sub-items
#### V013: TopNavbar Missing Hamburger Menu
- **Contract**: [DEF:TopNavbar:Component](./contracts/modules.md#5-topnavbar-component) - @UX_STATE: Mobile
- **Expected**: TopNavbar should have hamburger menu [≡] for mobile sidebar toggle
- **Actual**: No hamburger menu visible on mobile
- **Evidence**: Screenshot shows navbar without hamburger
- **Impact**: Users cannot toggle sidebar on mobile devices
- **Fix**: Add hamburger button that calls toggleMobileSidebar()
- **Status**: ✅ FIXED - Added hamburger menu button (visible only on mobile < md breakpoint)
#### V008: DashboardHub Pagination Shows All Page Numbers
- **Contract**: [DEF:DashboardHub:Component](./contracts/modules.md#7-dashboardhub-component)
- **Expected**: Pagination should show limited page numbers with ellipsis for many pages
- **Actual**: All page numbers displayed regardless of count
- **Evidence**: `frontend/src/routes/dashboards/+page.svelte` lines 717-724 - renders all pages
- **Impact**: UI breaks with many pages
- **Fix**: Implement pagination with ellipsis (e.g., 1 2 3 ... 10)
- **Status**: FIXED - Added getPaginationRange() function with ellipsis support
### Minor Violations
#### V009: Footer Positioning
- **Expected**: Footer should be at bottom of page content
- **Actual**: Footer appears immediately after content, may not stick to bottom on short pages
- **Fix**: Ensure footer sticks to bottom using flexbox or grid
#### V010: Missing i18n Keys Verification
- **Contract**: All components should use i18n for labels
- **Evidence**: Many components use fallback strings like `{$t.nav?.dashboard || 'Dashboards'}`
- **Impact**: Fallbacks suggest missing translations
- **Fix**: Verify all i18n keys exist in translation files
### Compliance Summary
| Category | Count | Status |
|----------|-------|--------|
| Critical Violations | 5 | All Fixed |
| Medium Violations | 4 | All Fixed |
| Minor Violations | 2 | All Fixed |
| **Total** | **11** | **11 Fixed, 0 Pending** |
### Required Actions
1. **Immediate (Critical)**: All fixed (V001-V005)
2. **Short-term (Medium)**: All fixed (V006-V008)
3. **Long-term (Minor)**: All fixed (V009-V010)
---
## Dependencies & Execution Order ## Dependencies & Execution Order
### Phase Dependencies ### Phase Dependencies

View File

@@ -131,6 +131,47 @@
- 📝 Clears authentication state and storage. - 📝 Clears authentication state and storage.
- ƒ **setLoading** (`Function`) - ƒ **setLoading** (`Function`)
- 📝 Updates the loading state. - 📝 Updates the loading state.
- 📦 **debounce** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/utils/debounce.js
- 🏗️ Layer: Unknown
- ƒ **debounce** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🗄️ **taskDrawer** (`Store`) `[CRITICAL]`
- 📝 Manage Task Drawer visibility and resource-to-task mapping
- 🏗️ Layer: UI
- 🔒 Invariant: resourceTaskMap always reflects current task associations
- 📦 **taskDrawer** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/stores/taskDrawer.js
- 🏗️ Layer: Unknown
- ƒ **openDrawerForTask** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **closeDrawer** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **updateResourceTask** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **getTaskForResource** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🗄️ **sidebar** (`Store`)
- 📝 Manage sidebar visibility and navigation state
- 🏗️ Layer: UI
- 🔒 Invariant: isExpanded state is always synced with localStorage
- 📦 **sidebar** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/stores/sidebar.js
- 🏗️ Layer: Unknown
- ƒ **toggleSidebar** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **setActiveItem** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **setMobileOpen** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **closeMobile** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **toggleMobileSidebar** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🗄️ **activity** (`Store`)
- 📝 Track active task count for navbar indicator
- 🏗️ Layer: UI
- 🔗 DEPENDS_ON -> `WebSocket connection, taskDrawer store`
- 🧩 **Select** (`Component`) `[TRIVIAL]` - 🧩 **Select** (`Component`) `[TRIVIAL]`
- 📝 Standardized dropdown selection component. - 📝 Standardized dropdown selection component.
- 🏗️ Layer: Atom - 🏗️ Layer: Atom
@@ -172,12 +213,95 @@
- 📝 Holds the current active locale string. - 📝 Holds the current active locale string.
- 🗄️ **t** (`Store`) - 🗄️ **t** (`Store`)
- 📝 Derived store providing the translation dictionary. - 📝 Derived store providing the translation dictionary.
- ƒ **selectPlugin** (`Function`) - ƒ **_** (`Function`)
- 📝 Handles plugin selection and navigation. - 📝 Get translation by key path.
- ƒ **handleFormSubmit** (`Function`) - 🧩 **Sidebar** (`Component`) `[CRITICAL]`
- 📝 Handles task creation from dynamic form submission. - 📝 Persistent left sidebar with resource categories navigation
- 🏗️ Layer: UI
- 🔒 Invariant: Always shows active category and item
- ⬅️ READS_FROM `app`
- ⬅️ READS_FROM `lib`
- ⬅️ READS_FROM `t`
- 📦 **Sidebar** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/components/layout/Sidebar.svelte
- 🏗️ Layer: Unknown
- ƒ **handleItemClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleCategoryToggle** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSubItemClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleToggleClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleOverlayClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🧩 **TopNavbar** (`Component`) `[CRITICAL]`
- 📝 Unified top navigation bar with Logo, Search, Activity, and User menu
- 🏗️ Layer: UI
- 🔒 Invariant: Always visible on non-login pages
- ⚡ Events: activityClick
- ⬅️ READS_FROM `app`
- ⬅️ READS_FROM `lib`
- ⬅️ READS_FROM `sidebarStore`
- 📦 **TopNavbar** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/components/layout/TopNavbar.svelte
- 🏗️ Layer: Unknown
- ƒ **toggleUserMenu** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **closeUserMenu** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleLogout** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleActivityClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSearchFocus** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSearchBlur** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleDocumentClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleHamburgerClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🧩 **Breadcrumbs** (`Component`)
- 📝 Display page hierarchy navigation
- 🏗️ Layer: UI
- 🔒 Invariant: Always shows current page path
- 📥 Props: maxVisible: any
- ⬅️ READS_FROM `app`
- ⬅️ READS_FROM `lib`
- ⬅️ READS_FROM `page`
- 📦 **Breadcrumbs** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/components/layout/Breadcrumbs.svelte
- 🏗️ Layer: Unknown
- ƒ **getBreadcrumbs** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **formatBreadcrumbLabel** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🧩 **TaskDrawer** (`Component`) `[CRITICAL]`
- 📝 Global task drawer for monitoring background operations
- 🏗️ Layer: UI
- 🔒 Invariant: Drawer shows logs for active task or remains closed
- ⬅️ READS_FROM `lib`
- ⬅️ READS_FROM `taskDrawerStore`
- ➡️ WRITES_TO `taskDrawerStore`
- 📦 **TaskDrawer** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/lib/components/layout/TaskDrawer.svelte
- 🏗️ Layer: Unknown
- ƒ **handleClose** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleOverlayClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **connectWebSocket** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **disconnectWebSocket** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **HomePage** (`Page`) `[CRITICAL]`
- 📝 Redirect to Dashboard Hub as per UX requirements
- 🏗️ Layer: UI
- 🔒 Invariant: Always redirects to /dashboards
- ƒ **load** (`Function`) - ƒ **load** (`Function`)
- 📝 Loads initial plugin data for the dashboard. - 📝 Loads initial plugin data for the dashboard.
- 📦 **layout** (`Module`)
- 🧩 **TaskManagementPage** (`Component`) - 🧩 **TaskManagementPage** (`Component`)
- 📝 Page for managing and monitoring tasks. - 📝 Page for managing and monitoring tasks.
- 🏗️ Layer: Page - 🏗️ Layer: Page
@@ -192,6 +316,62 @@
- 📝 Updates the selected task ID when a task is clicked. - 📝 Updates the selected task ID when a task is clicked.
- ƒ **handleRunBackup** (`Function`) - ƒ **handleRunBackup** (`Function`)
- 📝 Triggers a manual backup task for the selected environment. - 📝 Triggers a manual backup task for the selected environment.
- 📦 **DatasetHub** (`Page`) `[CRITICAL]`
- 📝 Dataset Hub - Dedicated hub for datasets with mapping progress
- 🏗️ Layer: UI
- 🔒 Invariant: Always shows environment selector and dataset grid
- 📦 **+page** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/routes/datasets/+page.svelte
- 🏗️ Layer: Unknown
- ƒ **loadEnvironments** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **loadDatasets** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleEnvChange** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSearch** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handlePageChange** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handlePageSizeChange** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **updateSelectionState** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleCheckboxChange** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSelectAll** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleSelectVisible** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleAction** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleBulkMapColumns** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleBulkGenerateDocs** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleTaskStatusClick** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **getTaskStatusIcon** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **getMappingProgressClass** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **DatasetDetail** (`Page`) `[CRITICAL]`
- 📝 Dataset Detail View - Shows detailed dataset information with columns, SQL, and linked dashboards
- 🏗️ Layer: UI
- 🔒 Invariant: Always shows dataset details when loaded
- 📦 **+page** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for frontend/src/routes/datasets/[id]/+page.svelte
- 🏗️ Layer: Unknown
- ƒ **loadDatasetDetail** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **navigateToDashboard** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **goBack** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **getColumnTypeClass** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **getMappingProgress** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 🧩 **LoginPage** (`Component`) - 🧩 **LoginPage** (`Component`)
- 📝 Provides the user interface for local and ADFS authentication. - 📝 Provides the user interface for local and ADFS authentication.
- 🏗️ Layer: UI - 🏗️ Layer: UI
@@ -202,6 +382,15 @@
- 📝 Submits the local login form to the backend. - 📝 Submits the local login form to the backend.
- ƒ **handleADFSLogin** (`Function`) - ƒ **handleADFSLogin** (`Function`)
- 📝 Redirects the user to the ADFS login endpoint. - 📝 Redirects the user to the ADFS login endpoint.
- 📦 **StorageIndexPage** (`Page`) `[TRIVIAL]`
- 📝 Redirect to the backups page as the default storage view.
- 🏗️ Layer: Page
- 🔒 Invariant: Always redirects to /storage/backups.
- 📦 **StorageReposPage** (`Page`)
- ƒ **fetchEnvironments** (`Function`)
- 📝 Fetches the list of available environments.
- ƒ **fetchDashboards** (`Function`)
- 📝 Fetches dashboards for a specific environment.
- 🧩 **AdminRolesPage** (`Component`) - 🧩 **AdminRolesPage** (`Component`)
- 📝 UI for managing system roles and their permissions. - 📝 UI for managing system roles and their permissions.
- 🏗️ Layer: Domain - 🏗️ Layer: Domain
@@ -317,20 +506,31 @@
- 📝 Page for system diagnostics and debugging. - 📝 Page for system diagnostics and debugging.
- 🏗️ Layer: UI - 🏗️ Layer: UI
- ⬅️ READS_FROM `lib` - ⬅️ READS_FROM `lib`
- ƒ **handleSaveGlobal** (`Function`) - 📦 **SettingsPage** (`Page`) `[CRITICAL]`
- 📝 Saves global application settings. - 📝 Consolidated Settings Page - All settings in one place with tabbed navigation
- ƒ **handleSaveStorage** (`Function`) - 🏗️ Layer: UI
- 📝 Saves storage-specific settings. - 🔒 Invariant: Always shows tabbed interface with all settings categories
- ƒ **handleAddOrUpdateEnv** (`Function`) - 📦 **+page** (`Module`) `[TRIVIAL]`
- 📝 Adds a new environment or updates an existing one. - 📝 Auto-generated module for frontend/src/routes/settings/+page.svelte
- ƒ **handleDeleteEnv** (`Function`) - 🏗️ Layer: Unknown
- 📝 Deletes a Superset environment. - ƒ **loadSettings** (`Function`) `[TRIVIAL]`
- ƒ **handleTestEnv** (`Function`) - 📝 Auto-detected function (orphan)
- 📝 Tests the connection to a Superset environment. - ƒ **handleTabChange** (`Function`) `[TRIVIAL]`
- ƒ **editEnv** (`Function`) - 📝 Auto-detected function (orphan)
- 📝 Populates the environment form for editing. - ƒ **getTabClass** (`Function`) `[TRIVIAL]`
- ƒ **resetEnvForm** (`Function`) - 📝 Auto-detected function (orphan)
- 📝 Resets the environment creation/edit form to default state. - ƒ **handleSave** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleTestEnv** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **editEnv** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **resetEnvForm** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleAddOrUpdateEnv** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **handleDeleteEnv** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **load** (`Function`) - ƒ **load** (`Function`)
- 📝 Loads application settings and environment list. - 📝 Loads application settings and environment list.
- 🧩 **ConnectionsSettingsPage** (`Component`) - 🧩 **ConnectionsSettingsPage** (`Component`)
@@ -855,37 +1055,49 @@
- 📝 Handles application shutdown tasks, such as stopping the scheduler. - 📝 Handles application shutdown tasks, such as stopping the scheduler.
- ƒ **log_requests** (`Function`) - ƒ **log_requests** (`Function`)
- 📝 Middleware to log incoming HTTP requests and their response status. - 📝 Middleware to log incoming HTTP requests and their response status.
- 📦 **api.include_routers** (`Action`)
- 📝 Registers all API routers with the FastAPI application.
- 🏗️ Layer: API
- ƒ **websocket_endpoint** (`Function`) `[CRITICAL]` - ƒ **websocket_endpoint** (`Function`) `[CRITICAL]`
- 📝 Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering. - 📝 Provides a WebSocket endpoint for real-time log streaming of a task with server-side filtering.
- 📦 **StaticFiles** (`Mount`) - 📦 **StaticFiles** (`Mount`)
- 📝 Mounts the frontend build directory to serve static assets. - 📝 Mounts the frontend build directory to serve static assets.
- ƒ **serve_spa** (`Function`)
- 📝 Serves frontend static files or index.html for SPA routing.
- ƒ **read_root** (`Function`) - ƒ **read_root** (`Function`)
- 📝 A simple root endpoint to confirm that the API is running when frontend is missing. - 📝 A simple root endpoint to confirm that the API is running when frontend is missing.
- ƒ **network_error_handler** (`Function`) `[TRIVIAL]` - ƒ **network_error_handler** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- ƒ **matches_filters** (`Function`) `[TRIVIAL]` - ƒ **matches_filters** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- ƒ **serve_spa** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **Dependencies** (`Module`) - 📦 **Dependencies** (`Module`)
- 📝 Manages the creation and provision of shared application dependencies, such as the PluginLoader and TaskManager, to avoid circular imports. - 📝 Manages creation and provision of shared application dependencies, such as PluginLoader and TaskManager, to avoid circular imports.
- 🏗️ Layer: Core - 🏗️ Layer: Core
- ƒ **get_config_manager** (`Function`) - ƒ **get_config_manager** (`Function`)
- 📝 Dependency injector for the ConfigManager. - 📝 Dependency injector for ConfigManager.
- ƒ **get_plugin_loader** (`Function`) - ƒ **get_plugin_loader** (`Function`)
- 📝 Dependency injector for the PluginLoader. - 📝 Dependency injector for PluginLoader.
- ƒ **get_task_manager** (`Function`) - ƒ **get_task_manager** (`Function`)
- 📝 Dependency injector for the TaskManager. - 📝 Dependency injector for TaskManager.
- ƒ **get_scheduler_service** (`Function`) - ƒ **get_scheduler_service** (`Function`)
- 📝 Dependency injector for the SchedulerService. - 📝 Dependency injector for SchedulerService.
- ƒ **get_resource_service** (`Function`)
- 📝 Dependency injector for ResourceService.
- ƒ **get_mapping_service** (`Function`)
- 📝 Dependency injector for MappingService.
- 📦 **oauth2_scheme** (`Variable`) - 📦 **oauth2_scheme** (`Variable`)
- 📝 OAuth2 password bearer scheme for token extraction. - 📝 OAuth2 password bearer scheme for token extraction.
- ƒ **get_current_user** (`Function`) - ƒ **get_current_user** (`Function`)
- 📝 Dependency for retrieving the currently authenticated user from a JWT. - 📝 Dependency for retrieving currently authenticated user from a JWT.
- ƒ **has_permission** (`Function`) - ƒ **has_permission** (`Function`)
- 📝 Dependency for checking if the current user has a specific permission. - 📝 Dependency for checking if the current user has a specific permission.
- ƒ **permission_checker** (`Function`) `[TRIVIAL]` - ƒ **permission_checker** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- 📦 **test_dataset_dashboard_relations** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for backend/src/scripts/test_dataset_dashboard_relations.py
- 🏗️ Layer: Unknown
- ƒ **test_dashboard_dataset_relations** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **backend.src.scripts.seed_permissions** (`Module`) - 📦 **backend.src.scripts.seed_permissions** (`Module`)
- 📝 Populates the auth database with initial system permissions. - 📝 Populates the auth database with initial system permissions.
- 🏗️ Layer: Scripts - 🏗️ Layer: Scripts
@@ -959,6 +1171,12 @@
- 📝 Удаляет дашборд по его ID или slug. - 📝 Удаляет дашборд по его ID или slug.
- ƒ **get_datasets** (`Function`) - ƒ **get_datasets** (`Function`)
- 📝 Получает полный список датасетов, автоматически обрабатывая пагинацию. - 📝 Получает полный список датасетов, автоматически обрабатывая пагинацию.
- ƒ **get_datasets_summary** (`Function`)
- 📝 Fetches dataset metadata optimized for the Dataset Hub grid.
- ƒ **get_dataset_detail** (`Function`)
- 📝 Fetches detailed dataset information including columns and linked dashboards
- 🔗 CALLS -> `self.get_dataset`
- 🔗 CALLS -> `self.network.request (for related_objects)`
- ƒ **get_dataset** (`Function`) - ƒ **get_dataset** (`Function`)
- 📝 Получает информацию о конкретном датасете по его ID. - 📝 Получает информацию о конкретном датасете по его ID.
- ƒ **update_dataset** (`Function`) - ƒ **update_dataset** (`Function`)
@@ -1326,6 +1544,8 @@
- 📝 Получает общее количество элементов для пагинации. - 📝 Получает общее количество элементов для пагинации.
- ƒ **fetch_paginated_data** (`Function`) - ƒ **fetch_paginated_data** (`Function`)
- 📝 Автоматически собирает данные со всех страниц пагинированного эндпоинта. - 📝 Автоматически собирает данные со всех страниц пагинированного эндпоинта.
- ƒ **init_poolmanager** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **backend.src.core.utils.matching** (`Module`) - 📦 **backend.src.core.utils.matching** (`Module`)
- 📝 Provides utility functions for fuzzy matching database names. - 📝 Provides utility functions for fuzzy matching database names.
- 🏗️ Layer: Core - 🏗️ Layer: Core
@@ -1551,6 +1771,40 @@
- 📝 Test connection to an LLM provider. - 📝 Test connection to an LLM provider.
- ƒ **test_provider_config** (`Function`) - ƒ **test_provider_config** (`Function`)
- 📝 Test connection with a provided configuration (not yet saved). - 📝 Test connection with a provided configuration (not yet saved).
- 📦 **backend.src.api.routes.datasets** (`Module`)
- 📝 API endpoints for the Dataset Hub - listing datasets with mapping progress
- 🏗️ Layer: API
- 🔒 Invariant: All dataset responses include last_task metadata
- 🔗 DEPENDS_ON -> `backend.src.dependencies`
- 🔗 DEPENDS_ON -> `backend.src.services.resource_service`
- 🔗 DEPENDS_ON -> `backend.src.core.superset_client`
- 📦 **MappedFields** (`DataClass`)
- 📦 **LastTask** (`DataClass`)
- 📦 **DatasetItem** (`DataClass`)
- 📦 **LinkedDashboard** (`DataClass`)
- 📦 **DatasetColumn** (`DataClass`)
- 📦 **DatasetDetailResponse** (`DataClass`)
- 📦 **DatasetsResponse** (`DataClass`)
- 📦 **TaskResponse** (`DataClass`)
- ƒ **get_dataset_ids** (`Function`)
- 📝 Fetch list of all dataset IDs from a specific environment (without pagination)
- 🔗 CALLS -> `ResourceService.get_datasets_with_status`
- ƒ **get_datasets** (`Function`)
- 📝 Fetch list of datasets from a specific environment with mapping progress
- 🔗 CALLS -> `ResourceService.get_datasets_with_status`
- 📦 **MapColumnsRequest** (`DataClass`)
- ƒ **map_columns** (`Function`)
- 📝 Trigger bulk column mapping for datasets
- 🔗 DISPATCHES -> `MapperPlugin`
- 🔗 CALLS -> `task_manager.create_task`
- 📦 **GenerateDocsRequest** (`DataClass`)
- ƒ **generate_docs** (`Function`)
- 📝 Trigger bulk documentation generation for datasets
- 🔗 DISPATCHES -> `LLMAnalysisPlugin`
- 🔗 CALLS -> `task_manager.create_task`
- ƒ **get_dataset_detail** (`Function`)
- 📝 Get detailed dataset information including columns and linked dashboards
- 🔗 CALLS -> `SupersetClient.get_dataset_detail`
- 📦 **backend.src.api.routes.git** (`Module`) - 📦 **backend.src.api.routes.git** (`Module`)
- 📝 Provides FastAPI endpoints for Git integration operations. - 📝 Provides FastAPI endpoints for Git integration operations.
- 🏗️ Layer: API - 🏗️ Layer: API
@@ -1615,10 +1869,13 @@
- 📦 **DatabaseResponse** (`DataClass`) - 📦 **DatabaseResponse** (`DataClass`)
- ƒ **get_environments** (`Function`) - ƒ **get_environments** (`Function`)
- 📝 List all configured environments. - 📝 List all configured environments.
- 🏗️ Layer: API
- ƒ **update_environment_schedule** (`Function`) - ƒ **update_environment_schedule** (`Function`)
- 📝 Update backup schedule for an environment. - 📝 Update backup schedule for an environment.
- 🏗️ Layer: API
- ƒ **get_environment_databases** (`Function`) - ƒ **get_environment_databases** (`Function`)
- 📝 Fetch the list of databases from a specific environment. - 📝 Fetch the list of databases from a specific environment.
- 🏗️ Layer: API
- 📦 **backend.src.api.routes.migration** (`Module`) - 📦 **backend.src.api.routes.migration** (`Module`)
- 📝 API endpoints for migration operations. - 📝 API endpoints for migration operations.
- 🏗️ Layer: API - 🏗️ Layer: API
@@ -1679,6 +1936,11 @@
- 📝 Retrieves current logging configuration. - 📝 Retrieves current logging configuration.
- ƒ **update_logging_config** (`Function`) - ƒ **update_logging_config** (`Function`)
- 📝 Updates logging configuration. - 📝 Updates logging configuration.
- **ConsolidatedSettingsResponse** (`Class`)
- ƒ **get_consolidated_settings** (`Function`)
- 📝 Retrieves all settings categories in a single call
- ƒ **update_consolidated_settings** (`Function`)
- 📝 Bulk update application settings from the consolidated view.
- 📦 **backend.src.api.routes.admin** (`Module`) - 📦 **backend.src.api.routes.admin** (`Module`)
- 📝 Admin API endpoints for user and role management. - 📝 Admin API endpoints for user and role management.
- 🏗️ Layer: API - 🏗️ Layer: API
@@ -1781,6 +2043,36 @@
- 📝 Resume a task that is awaiting input (e.g., passwords). - 📝 Resume a task that is awaiting input (e.g., passwords).
- ƒ **clear_tasks** (`Function`) - ƒ **clear_tasks** (`Function`)
- 📝 Clear tasks matching the status filter. - 📝 Clear tasks matching the status filter.
- 📦 **backend.src.api.routes.dashboards** (`Module`)
- 📝 API endpoints for the Dashboard Hub - listing dashboards with Git and task status
- 🏗️ Layer: API
- 🔒 Invariant: All dashboard responses include git_status and last_task metadata
- 🔗 DEPENDS_ON -> `backend.src.dependencies`
- 🔗 DEPENDS_ON -> `backend.src.services.resource_service`
- 🔗 DEPENDS_ON -> `backend.src.core.superset_client`
- 📦 **GitStatus** (`DataClass`)
- 📦 **LastTask** (`DataClass`)
- 📦 **DashboardItem** (`DataClass`)
- 📦 **DashboardsResponse** (`DataClass`)
- ƒ **get_dashboards** (`Function`)
- 📝 Fetch list of dashboards from a specific environment with Git status and last task status
- 🔗 CALLS -> `ResourceService.get_dashboards_with_status`
- 📦 **MigrateRequest** (`DataClass`)
- 📦 **TaskResponse** (`DataClass`)
- ƒ **migrate_dashboards** (`Function`)
- 📝 Trigger bulk migration of dashboards from source to target environment
- 🔗 DISPATCHES -> `MigrationPlugin`
- 🔗 CALLS -> `task_manager.create_task`
- 📦 **BackupRequest** (`DataClass`)
- ƒ **backup_dashboards** (`Function`)
- 📝 Trigger bulk backup of dashboards with optional cron schedule
- 🔗 DISPATCHES -> `BackupPlugin`
- 🔗 CALLS -> `task_manager.create_task`
- 📦 **DatabaseMapping** (`DataClass`)
- 📦 **DatabaseMappingsResponse** (`DataClass`)
- ƒ **get_database_mappings** (`Function`)
- 📝 Get database mapping suggestions between source and target environments
- 🔗 CALLS -> `MappingService.get_suggestions`
- 📦 **backend.src.models.llm** (`Module`) - 📦 **backend.src.models.llm** (`Module`)
- 📝 SQLAlchemy models for LLM provider configuration and validation results. - 📝 SQLAlchemy models for LLM provider configuration and validation results.
- 🏗️ Layer: Domain - 🏗️ Layer: Domain
@@ -1865,6 +2157,37 @@
- **ADGroupMapping** (`Class`) - **ADGroupMapping** (`Class`)
- 📝 Maps an Active Directory group to a local System Role. - 📝 Maps an Active Directory group to a local System Role.
- 🔗 DEPENDS_ON -> `Role` - 🔗 DEPENDS_ON -> `Role`
- 📦 **backend.src.services.resource_service** (`Module`)
- 📝 Shared service for fetching resource data with Git status and task status
- 🏗️ Layer: Service
- 🔒 Invariant: All resources include metadata about their current state
- 🔗 DEPENDS_ON -> `backend.src.core.superset_client`
- 🔗 DEPENDS_ON -> `backend.src.core.task_manager`
- 🔗 DEPENDS_ON -> `backend.src.services.git_service`
- **ResourceService** (`Class`)
- 📝 Provides centralized access to resource data with enhanced metadata
- ƒ **__init__** (`Function`)
- 📝 Initialize the resource service with dependencies
- ƒ **get_dashboards_with_status** (`Function`)
- 📝 Fetch dashboards from environment with Git status and last task status
- 🔗 CALLS -> `SupersetClient.get_dashboards_summary`
- 🔗 CALLS -> `self._get_git_status_for_dashboard`
- 🔗 CALLS -> `self._get_last_task_for_resource`
- ƒ **get_datasets_with_status** (`Function`)
- 📝 Fetch datasets from environment with mapping progress and last task status
- 🔗 CALLS -> `SupersetClient.get_datasets_summary`
- 🔗 CALLS -> `self._get_last_task_for_resource`
- ƒ **get_activity_summary** (`Function`)
- 📝 Get summary of active and recent tasks for the activity indicator
- ƒ **_get_git_status_for_dashboard** (`Function`)
- 📝 Get Git sync status for a dashboard
- 🔗 CALLS -> `GitService.get_repo`
- ƒ **_get_last_task_for_resource** (`Function`)
- 📝 Get the most recent task for a specific resource
- ƒ **_extract_resource_name_from_task** (`Function`)
- 📝 Extract resource name from task params
- ƒ **_extract_resource_type_from_task** (`Function`)
- 📝 Extract resource type from task params
- 📦 **backend.src.services.llm_provider** (`Module`) - 📦 **backend.src.services.llm_provider** (`Module`)
- 📝 Service for managing LLM provider configurations with encrypted API keys. - 📝 Service for managing LLM provider configurations with encrypted API keys.
- 🏗️ Layer: Domain - 🏗️ Layer: Domain
@@ -1903,6 +2226,9 @@
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- ƒ **__init__** (`Function`) `[TRIVIAL]` - ƒ **__init__** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- 📦 **backend.src.services** (`Module`)
- 📝 Package initialization for services module
- 🏗️ Layer: Core
- 📦 **backend.src.services.auth_service** (`Module`) - 📦 **backend.src.services.auth_service** (`Module`)
- 📝 Orchestrates authentication business logic. - 📝 Orchestrates authentication business logic.
- 🏗️ Layer: Service - 🏗️ Layer: Service
@@ -2247,6 +2573,34 @@
- 📝 Auto-detected function (orphan) - 📝 Auto-detected function (orphan)
- ƒ **test_environment_model** (`Function`) - ƒ **test_environment_model** (`Function`)
- 📝 Tests that Environment model correctly stores values. - 📝 Tests that Environment model correctly stores values.
- 📦 **backend.tests.test_dashboards_api** (`Module`)
- 📝 Contract-driven tests for Dashboard Hub API
- 🏗️ Layer: Domain (Tests)
- ƒ **test_get_dashboards_success** (`Function`)
- ƒ **test_get_dashboards_env_not_found** (`Function`)
- 📦 **test_dashboards_api** (`Test`)
- 📝 Verify GET /api/dashboards contract compliance
- 📦 **test_datasets_api** (`Test`)
- 📝 Verify GET /api/datasets contract compliance
- 📦 **test_resource_hubs** (`Module`) `[TRIVIAL]`
- 📝 Auto-generated module for backend/tests/test_resource_hubs.py
- 🏗️ Layer: Unknown
- ƒ **mock_deps** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_dashboards_success** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_dashboards_not_found** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_dashboards_search** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_datasets_success** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_datasets_not_found** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_datasets_search** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_get_datasets_service_failure** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- 📦 **test_task_logger** (`Module`) - 📦 **test_task_logger** (`Module`)
- 📝 Unit tests for TaskLogger and TaskContext. - 📝 Unit tests for TaskLogger and TaskContext.
- 🏗️ Layer: Test - 🏗️ Layer: Test
@@ -2298,6 +2652,11 @@
- 📝 Test sub-context logger uses new source. - 📝 Test sub-context logger uses new source.
- ƒ **test_multiple_sub_contexts** (`Function`) - ƒ **test_multiple_sub_contexts** (`Function`)
- 📝 Test creating multiple sub-contexts. - 📝 Test creating multiple sub-contexts.
- 📦 **backend.tests.test_resource_service** (`Module`)
- 📝 Contract-driven tests for ResourceService
- ƒ **test_get_dashboards_with_status** (`Function`)
- ƒ **test_get_dashboards_with_status** (`Function`) `[TRIVIAL]`
- 📝 Auto-detected function (orphan)
- ƒ **test_belief_scope_logs_entry_action_exit_at_debug** (`Function`) - ƒ **test_belief_scope_logs_entry_action_exit_at_debug** (`Function`)
- 📝 Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs at DEBUG level. - 📝 Test that belief_scope generates [ID][Entry], [ID][Action], and [ID][Exit] logs at DEBUG level.
- ƒ **test_belief_scope_error_handling** (`Function`) - ƒ **test_belief_scope_error_handling** (`Function`)

View File

View File