linter + новые таски

This commit is contained in:
2026-02-10 12:53:01 +03:00
parent 794cc55fe7
commit 76b98fcf8f
73 changed files with 2298 additions and 726 deletions

View File

@@ -10,7 +10,6 @@
# [SECTION: IMPORTS]
from pydantic import Field
from pydantic_settings import BaseSettings
import os
# [/SECTION]
# [DEF:AuthConfig:Class]

View File

@@ -11,8 +11,8 @@
# [SECTION: IMPORTS]
from datetime import datetime, timedelta
from typing import Optional, List
from jose import JWTError, jwt
from typing import Optional
from jose import jwt
from .config import auth_config
from ..logger import belief_scope
# [/SECTION]

View File

@@ -11,7 +11,7 @@
# [SECTION: IMPORTS]
from typing import Optional, List
from sqlalchemy.orm import Session
from ...models.auth import User, Role, Permission, ADGroupMapping
from ...models.auth import User, Role, Permission
from ..logger import belief_scope
# [/SECTION]

View File

@@ -15,7 +15,7 @@ import json
import os
from pathlib import Path
from typing import Optional, List
from .config_models import AppConfig, Environment, GlobalSettings
from .config_models import AppConfig, Environment, GlobalSettings, StorageConfig
from .logger import logger, configure_logger, belief_scope
# [/SECTION]
@@ -46,7 +46,7 @@ class ConfigManager:
# 3. Runtime check of @POST
assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
logger.info(f"[ConfigManager][Exit] Initialized")
logger.info("[ConfigManager][Exit] Initialized")
# [/DEF:__init__:Function]
# [DEF:_load_config:Function]
@@ -59,7 +59,7 @@ class ConfigManager:
logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")
if not self.config_path.exists():
logger.info(f"[_load_config][Action] Config file not found. Creating default.")
logger.info("[_load_config][Action] Config file not found. Creating default.")
default_config = AppConfig(
environments=[],
settings=GlobalSettings()
@@ -75,7 +75,7 @@ class ConfigManager:
del data["settings"]["backup_path"]
config = AppConfig(**data)
logger.info(f"[_load_config][Coherence:OK] Configuration loaded")
logger.info("[_load_config][Coherence:OK] Configuration loaded")
return config
except Exception as e:
logger.error(f"[_load_config][Coherence:Failed] Error loading config: {e}")
@@ -103,7 +103,7 @@ class ConfigManager:
try:
with open(self.config_path, "w") as f:
json.dump(config.dict(), f, indent=4)
logger.info(f"[_save_config_to_disk][Action] Configuration saved")
logger.info("[_save_config_to_disk][Action] Configuration saved")
except Exception as e:
logger.error(f"[_save_config_to_disk][Coherence:Failed] Failed to save: {e}")
# [/DEF:_save_config_to_disk:Function]
@@ -134,7 +134,7 @@ class ConfigManager:
# @PARAM: settings (GlobalSettings) - The new global settings.
def update_global_settings(self, settings: GlobalSettings):
with belief_scope("update_global_settings"):
logger.info(f"[update_global_settings][Entry] Updating settings")
logger.info("[update_global_settings][Entry] Updating settings")
# 1. Runtime check of @PRE
assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"
@@ -146,7 +146,7 @@ class ConfigManager:
# Reconfigure logger with new settings
configure_logger(settings.logging)
logger.info(f"[update_global_settings][Exit] Settings updated")
logger.info("[update_global_settings][Exit] Settings updated")
# [/DEF:update_global_settings:Function]
# [DEF:validate_path:Function]
@@ -222,7 +222,7 @@ class ConfigManager:
self.config.environments.append(env)
self.save()
logger.info(f"[add_environment][Exit] Environment added")
logger.info("[add_environment][Exit] Environment added")
# [/DEF:add_environment:Function]
# [DEF:update_environment:Function]

View File

@@ -11,14 +11,9 @@
# [SECTION: IMPORTS]
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.orm import sessionmaker
from ..models.mapping import Base
# Import models to ensure they're registered with Base
from ..models.task import TaskRecord
from ..models.connection import ConnectionConfig
from ..models.git import GitServerConfig, GitRepository, DeploymentEnvironment
from ..models.auth import User, Role, Permission, ADGroupMapping
from ..models.llm import LLMProvider, ValidationRecord
from .logger import belief_scope
from .auth.config import auth_config
import os

View File

@@ -111,7 +111,6 @@ def configure_logger(config):
# Add file handler if file_path is set
if config.file_path:
import os
from pathlib import Path
log_file = Path(config.file_path)
log_file.parent.mkdir(parents=True, exist_ok=True)

View File

@@ -11,12 +11,10 @@
import zipfile
import yaml
import os
import shutil
import tempfile
from pathlib import Path
from typing import Dict
from .logger import logger, belief_scope
import yaml
# [/SECTION]
# [DEF:MigrationEngine:Class]

View File

@@ -1,9 +1,8 @@
import importlib.util
import os
import sys # Added this line
from typing import Dict, Type, List, Optional
from typing import Dict, List, Optional
from .plugin_base import PluginBase, PluginConfig
from jsonschema import validate
from .logger import belief_scope
# [DEF:PluginLoader:Class]

View File

@@ -10,7 +10,6 @@ from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from .logger import logger, belief_scope
from .config_manager import ConfigManager
from typing import Optional
import asyncio
# [/SECTION]

View File

@@ -13,10 +13,10 @@
import json
import zipfile
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from typing import Dict, List, Optional, Tuple, Union, cast
from requests import Response
from .logger import logger as app_logger, belief_scope
from .utils.network import APIClient, SupersetAPIError, AuthenticationError, DashboardNotFoundError, NetworkError
from .utils.network import APIClient, SupersetAPIError
from .utils.fileio import get_filename_from_headers
from .config_models import Environment
# [/SECTION]
@@ -212,6 +212,30 @@ class SupersetClient:
return total_count, paginated_data
# [/DEF:get_datasets:Function]
# [DEF:get_datasets_summary:Function]
# @PURPOSE: Fetches dataset metadata optimized for the Dataset Hub grid.
# @PRE: Client is authenticated.
# @POST: Returns a list of dataset metadata summaries.
# @RETURN: List[Dict]
def get_datasets_summary(self) -> List[Dict]:
    with belief_scope("SupersetClient.get_datasets_summary"):
        # Ask the API for only the columns the Dataset Hub grid displays.
        wanted = {
            "columns": ["id", "table_name", "schema", "database"]
        }
        _, raw_items = self.get_datasets(query=wanted)
        # Flatten each record to the contract shape; the nested database
        # object is reduced to its display name ("Unknown" when absent).
        return [
            {
                "id": item.get("id"),
                "table_name": item.get("table_name"),
                "schema": item.get("schema"),
                "database": item.get("database", {}).get("database_name", "Unknown"),
            }
            for item in raw_items
        ]
# [/DEF:get_datasets_summary:Function]
# [DEF:get_dataset:Function]
# @PURPOSE: Получает информацию о конкретном датасете по его ID.
# @PARAM: dataset_id (int) - ID датасета.

View File

@@ -5,7 +5,6 @@
# @LAYER: Core
# @RELATION: Uses TaskPersistenceService and TaskLogPersistenceService to delete old tasks and logs.
from datetime import datetime, timedelta
from typing import List
from .persistence import TaskPersistenceService, TaskLogPersistenceService
from ..logger import logger, belief_scope

View File

@@ -7,7 +7,7 @@
# @INVARIANT: Each TaskContext is bound to a single task execution.
# [SECTION: IMPORTS]
from typing import Dict, Any, Optional, Callable
from typing import Dict, Any, Callable
from .task_logger import TaskLogger
# [/SECTION]

View File

@@ -14,7 +14,7 @@ from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from typing import Dict, Any, List, Optional
from .models import Task, TaskStatus, LogEntry, LogFilter, LogStats, TaskLog
from .models import Task, TaskStatus, LogEntry, LogFilter, LogStats
from .persistence import TaskPersistenceService, TaskLogPersistenceService
from .context import TaskContext
from ..logger import logger, belief_scope, should_log_task_level
@@ -136,7 +136,7 @@ class TaskManager:
logger.error(f"Plugin with ID '{plugin_id}' not found.")
raise ValueError(f"Plugin with ID '{plugin_id}' not found.")
plugin = self.plugin_loader.get_plugin(plugin_id)
self.plugin_loader.get_plugin(plugin_id)
if not isinstance(params, dict):
logger.error("Task parameters must be a dictionary.")
@@ -248,7 +248,8 @@ class TaskManager:
async def wait_for_resolution(self, task_id: str):
with belief_scope("TaskManager.wait_for_resolution", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
if not task:
return
task.status = TaskStatus.AWAITING_MAPPING
self.persistence_service.persist_task(task)
@@ -269,7 +270,8 @@ class TaskManager:
async def wait_for_input(self, task_id: str):
with belief_scope("TaskManager.wait_for_input", f"task_id={task_id}"):
task = self.tasks.get(task_id)
if not task: return
if not task:
return
# Status is already set to AWAITING_INPUT by await_input()
self.task_futures[task_id] = self.loop.create_future()

View File

@@ -7,11 +7,10 @@
# [SECTION: IMPORTS]
from datetime import datetime
from typing import List, Optional, Dict, Any
from typing import List, Optional
import json
from sqlalchemy.orm import Session
from sqlalchemy import and_, or_
from ...models.task import TaskRecord, TaskLogRecord
from ..database import TasksSessionLocal
from .models import Task, TaskStatus, LogEntry, TaskLog, LogFilter, LogStats

View File

@@ -8,7 +8,6 @@
# [SECTION: IMPORTS]
from typing import Dict, Any, Optional, Callable
from datetime import datetime
# [/SECTION]
# [DEF:TaskLogger:Class]

View File

@@ -11,7 +11,7 @@
# [SECTION: IMPORTS]
import pandas as pd # type: ignore
import psycopg2 # type: ignore
from typing import Dict, List, Optional, Any
from typing import Dict, Optional, Any
from ..logger import logger as app_logger, belief_scope
# [/SECTION]

View File

@@ -19,7 +19,6 @@ from datetime import date, datetime
import shutil
import zlib
from dataclasses import dataclass
import yaml
from ..logger import logger as app_logger, belief_scope
# [/SECTION]

View File

@@ -177,7 +177,8 @@ class APIClient:
# @POST: Returns headers including auth tokens.
def headers(self) -> Dict[str, str]:
with belief_scope("headers"):
if not self._authenticated: self.authenticate()
if not self._authenticated:
self.authenticate()
return {
"Authorization": f"Bearer {self._tokens['access_token']}",
"X-CSRFToken": self._tokens.get("csrf_token", ""),
@@ -200,7 +201,8 @@ class APIClient:
with belief_scope("request"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy()
if headers: _headers.update(headers)
if headers:
_headers.update(headers)
try:
response = self.session.request(method, full_url, headers=_headers, **kwargs)
@@ -223,9 +225,12 @@ class APIClient:
status_code = e.response.status_code
if status_code == 502 or status_code == 503 or status_code == 504:
raise NetworkError(f"Environment unavailable (Status {status_code})", status_code=status_code) from e
if status_code == 404: raise DashboardNotFoundError(endpoint) from e
if status_code == 403: raise PermissionDeniedError() from e
if status_code == 401: raise AuthenticationError() from e
if status_code == 404:
raise DashboardNotFoundError(endpoint) from e
if status_code == 403:
raise PermissionDeniedError() from e
if status_code == 401:
raise AuthenticationError() from e
raise SupersetAPIError(f"API Error {status_code}: {e.response.text}") from e
# [/DEF:_handle_http_error:Function]
@@ -237,9 +242,12 @@ class APIClient:
# @POST: Raises a NetworkError.
def _handle_network_error(self, e: requests.exceptions.RequestException, url: str):
with belief_scope("_handle_network_error"):
if isinstance(e, requests.exceptions.Timeout): msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError): msg = "Connection error"
else: msg = f"Unknown network error: {e}"
if isinstance(e, requests.exceptions.Timeout):
msg = "Request timeout"
elif isinstance(e, requests.exceptions.ConnectionError):
msg = "Connection error"
else:
msg = f"Unknown network error: {e}"
raise NetworkError(msg, url=url) from e
# [/DEF:_handle_network_error:Function]
@@ -256,7 +264,9 @@ class APIClient:
def upload_file(self, endpoint: str, file_info: Dict[str, Any], extra_data: Optional[Dict] = None, timeout: Optional[int] = None) -> Dict:
with belief_scope("upload_file"):
full_url = f"{self.base_url}{endpoint}"
_headers = self.headers.copy(); _headers.pop('Content-Type', None)
_headers = self.headers.copy()
_headers.pop('Content-Type', None)
file_obj, file_name, form_field = file_info.get("file_obj"), file_info.get("file_name"), file_info.get("form_field", "file")