semantic markup update
@@ -30,30 +30,33 @@ class ConfigManager:
     # @POST: self.config is an instance of AppConfig
     # @PARAM: config_path (str) - Path to the configuration file.
     def __init__(self, config_path: str = "config.json"):
-        # 1. Runtime check of @PRE
-        assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"
-
-        logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")
-
-        # 2. Logic implementation
-        self.config_path = Path(config_path)
-        self.config: AppConfig = self._load_config()
+        with belief_scope("__init__"):
+            # 1. Runtime check of @PRE
+            assert isinstance(config_path, str) and config_path, "config_path must be a non-empty string"
+
+            logger.info(f"[ConfigManager][Entry] Initializing with {config_path}")
+
+            # 2. Logic implementation
+            self.config_path = Path(config_path)
+            self.config: AppConfig = self._load_config()

-        # Configure logger with loaded settings
-        configure_logger(self.config.settings.logging)
+            # Configure logger with loaded settings
+            configure_logger(self.config.settings.logging)

-        # 3. Runtime check of @POST
-        assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"
+            # 3. Runtime check of @POST
+            assert isinstance(self.config, AppConfig), "self.config must be an instance of AppConfig"

-        logger.info(f"[ConfigManager][Exit] Initialized")
+            logger.info(f"[ConfigManager][Exit] Initialized")
     # [/DEF:__init__:Function]

     # [DEF:_load_config:Function]
     # @PURPOSE: Loads the configuration from disk or creates a default one.
     # @PRE: self.config_path is set.
     # @POST: isinstance(return, AppConfig)
     # @RETURN: AppConfig - The loaded or default configuration.
     def _load_config(self) -> AppConfig:
-        logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")
+        with belief_scope("_load_config"):
+            logger.debug(f"[_load_config][Entry] Loading from {self.config_path}")

             if not self.config_path.exists():
                 logger.info(f"[_load_config][Action] Config file not found. Creating default.")
@@ -83,9 +86,11 @@ class ConfigManager:
     # [DEF:_save_config_to_disk:Function]
     # @PURPOSE: Saves the provided configuration object to disk.
     # @PRE: isinstance(config, AppConfig)
     # @POST: Configuration saved to disk.
     # @PARAM: config (AppConfig) - The configuration to save.
     def _save_config_to_disk(self, config: AppConfig):
-        logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")
+        with belief_scope("_save_config_to_disk"):
+            logger.debug(f"[_save_config_to_disk][Entry] Saving to {self.config_path}")

             # 1. Runtime check of @PRE
             assert isinstance(config, AppConfig), "config must be an instance of AppConfig"
@@ -101,23 +106,31 @@ class ConfigManager:

     # [DEF:save:Function]
     # @PURPOSE: Saves the current configuration state to disk.
     # @PRE: self.config is set.
     # @POST: self._save_config_to_disk called.
     def save(self):
-        self._save_config_to_disk(self.config)
+        with belief_scope("save"):
+            self._save_config_to_disk(self.config)
     # [/DEF:save:Function]

     # [DEF:get_config:Function]
     # @PURPOSE: Returns the current configuration.
     # @PRE: self.config is set.
     # @POST: Returns self.config.
     # @RETURN: AppConfig - The current configuration.
     def get_config(self) -> AppConfig:
-        return self.config
+        with belief_scope("get_config"):
+            return self.config
     # [/DEF:get_config:Function]

     # [DEF:update_global_settings:Function]
     # @PURPOSE: Updates the global settings and persists the change.
     # @PRE: isinstance(settings, GlobalSettings)
     # @POST: self.config.settings updated and saved.
     # @PARAM: settings (GlobalSettings) - The new global settings.
     def update_global_settings(self, settings: GlobalSettings):
-        logger.info(f"[update_global_settings][Entry] Updating settings")
+        with belief_scope("update_global_settings"):
+            logger.info(f"[update_global_settings][Entry] Updating settings")

             # 1. Runtime check of @PRE
             assert isinstance(settings, GlobalSettings), "settings must be an instance of GlobalSettings"
@@ -134,10 +147,13 @@ class ConfigManager:

     # [DEF:validate_path:Function]
     # @PURPOSE: Validates if a path exists and is writable.
     # @PRE: path is a string.
     # @POST: Returns (bool, str) status.
     # @PARAM: path (str) - The path to validate.
     # @RETURN: tuple (bool, str) - (is_valid, message)
     def validate_path(self, path: str) -> tuple[bool, str]:
-        p = os.path.abspath(path)
+        with belief_scope("validate_path"):
+            p = os.path.abspath(path)
             if not os.path.exists(p):
                 try:
                     os.makedirs(p, exist_ok=True)
@@ -152,24 +168,32 @@ class ConfigManager:

     # [DEF:get_environments:Function]
     # @PURPOSE: Returns the list of configured environments.
     # @PRE: self.config is set.
     # @POST: Returns list of environments.
     # @RETURN: List[Environment] - List of environments.
     def get_environments(self) -> List[Environment]:
-        return self.config.environments
+        with belief_scope("get_environments"):
+            return self.config.environments
     # [/DEF:get_environments:Function]

     # [DEF:has_environments:Function]
     # @PURPOSE: Checks if at least one environment is configured.
     # @PRE: self.config is set.
     # @POST: Returns boolean indicating if environments exist.
     # @RETURN: bool - True if at least one environment exists.
     def has_environments(self) -> bool:
-        return len(self.config.environments) > 0
+        with belief_scope("has_environments"):
+            return len(self.config.environments) > 0
     # [/DEF:has_environments:Function]

     # [DEF:add_environment:Function]
     # @PURPOSE: Adds a new environment to the configuration.
     # @PRE: isinstance(env, Environment)
     # @POST: Environment added or updated in self.config.environments.
     # @PARAM: env (Environment) - The environment to add.
     def add_environment(self, env: Environment):
-        logger.info(f"[add_environment][Entry] Adding environment {env.id}")
+        with belief_scope("add_environment"):
+            logger.info(f"[add_environment][Entry] Adding environment {env.id}")

             # 1. Runtime check of @PRE
             assert isinstance(env, Environment), "env must be an instance of Environment"
@@ -186,11 +210,13 @@ class ConfigManager:
     # [DEF:update_environment:Function]
     # @PURPOSE: Updates an existing environment.
     # @PRE: isinstance(env_id, str) and len(env_id) > 0 and isinstance(updated_env, Environment)
     # @POST: Returns True if environment was found and updated.
     # @PARAM: env_id (str) - The ID of the environment to update.
     # @PARAM: updated_env (Environment) - The updated environment data.
     # @RETURN: bool - True if updated, False otherwise.
     def update_environment(self, env_id: str, updated_env: Environment) -> bool:
-        logger.info(f"[update_environment][Entry] Updating {env_id}")
+        with belief_scope("update_environment"):
+            logger.info(f"[update_environment][Entry] Updating {env_id}")

             # 1. Runtime check of @PRE
             assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
@@ -215,9 +241,11 @@ class ConfigManager:
     # [DEF:delete_environment:Function]
     # @PURPOSE: Deletes an environment by ID.
     # @PRE: isinstance(env_id, str) and len(env_id) > 0
     # @POST: Environment removed from self.config.environments if it existed.
     # @PARAM: env_id (str) - The ID of the environment to delete.
     def delete_environment(self, env_id: str):
-        logger.info(f"[delete_environment][Entry] Deleting {env_id}")
+        with belief_scope("delete_environment"):
+            logger.info(f"[delete_environment][Entry] Deleting {env_id}")

             # 1. Runtime check of @PRE
             assert env_id and isinstance(env_id, str), "env_id must be a non-empty string"
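Taken together, these hunks wrap every ConfigManager entry point in a belief_scope block while keeping the existing @PRE/@POST runtime asserts. A minimal usage sketch against the public API shown above; the path values are illustrative only:

    manager = ConfigManager("config.json")           # runs the @PRE assert, then loads or creates config
    ok, msg = manager.validate_path("/tmp/backups")  # returns (is_valid, message)
    if manager.has_environments():
        for env in manager.get_environments():
            print(env.id)
    manager.save()                                   # persists via _save_config_to_disk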
@@ -46,33 +46,40 @@ TasksSessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=tasks_engine)

 # [DEF:init_db:Function]
 # @PURPOSE: Initializes the database by creating all tables.
 # @PRE: engine and tasks_engine are initialized.
 # @POST: Database tables created.
 def init_db():
-    Base.metadata.create_all(bind=engine)
-    Base.metadata.create_all(bind=tasks_engine)
+    with belief_scope("init_db"):
+        Base.metadata.create_all(bind=engine)
+        Base.metadata.create_all(bind=tasks_engine)
 # [/DEF:init_db:Function]

 # [DEF:get_db:Function]
 # @PURPOSE: Dependency for getting a database session.
 # @PRE: SessionLocal is initialized.
 # @POST: Session is closed after use.
 # @RETURN: Generator[Session, None, None]
 def get_db():
-    db = SessionLocal()
-    try:
-        yield db
-    finally:
-        db.close()
+    with belief_scope("get_db"):
+        db = SessionLocal()
+        try:
+            yield db
+        finally:
+            db.close()
 # [/DEF:get_db:Function]

 # [DEF:get_tasks_db:Function]
 # @PURPOSE: Dependency for getting a tasks database session.
 # @PRE: TasksSessionLocal is initialized.
 # @POST: Session is closed after use.
 # @RETURN: Generator[Session, None, None]
 def get_tasks_db():
-    db = TasksSessionLocal()
-    try:
-        yield db
-    finally:
-        db.close()
+    with belief_scope("get_tasks_db"):
+        db = TasksSessionLocal()
+        try:
+            yield db
+        finally:
+            db.close()
 # [/DEF:get_tasks_db:Function]

 # [/DEF:backend.src.core.database:Module]
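get_db and get_tasks_db are generator dependencies of the kind FastAPI consumes. Whether this project uses FastAPI is an assumption this commit does not confirm, but a typical consumer would look like the sketch below (endpoint path and return value are invented):

    from fastapi import Depends, FastAPI
    from sqlalchemy.orm import Session

    app = FastAPI()

    @app.get("/health/db")
    def health(db: Session = Depends(get_db)):  # get_db from the hunk above
        # The finally block in get_db guarantees db.close() after the response.
        return {"ok": db.is_active}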
@@ -22,12 +22,19 @@ _enable_belief_state = True
 # [DEF:BeliefFormatter:Class]
 # @PURPOSE: Custom logging formatter that adds belief state prefixes to log messages.
 class BeliefFormatter(logging.Formatter):
     # [DEF:format:Function]
     # @PURPOSE: Formats the log record, adding belief state context if available.
     # @PRE: record is a logging.LogRecord.
     # @POST: Returns formatted string.
     # @PARAM: record (logging.LogRecord) - The log record to format.
     # @RETURN: str - The formatted log message.
     def format(self, record):
         msg = super().format(record)
         anchor_id = getattr(_belief_state, 'anchor_id', None)
         if anchor_id:
             msg = f"[{anchor_id}][Action] {msg}"
         return msg
     # [/DEF:format:Function]
 # [/DEF:BeliefFormatter:Class]

 # Re-using LogEntry from task_manager for consistency
@@ -42,8 +49,12 @@ class LogEntry(BaseModel):

 # [/DEF:LogEntry:Class]

-# [DEF:BeliefScope:Function]
+# [DEF:belief_scope:Function]
 # @PURPOSE: Context manager for structured Belief State logging.
+# @PARAM: anchor_id (str) - The identifier for the current semantic block.
+# @PARAM: message (str) - Optional entry message.
+# @PRE: anchor_id must be provided.
+# @POST: Thread-local belief state is updated and entry/exit logs are generated.
 @contextmanager
 def belief_scope(anchor_id: str, message: str = ""):
     # Log Entry if enabled
@@ -71,9 +82,9 @@ def belief_scope(anchor_id: str, message: str = ""):
     # Restore old anchor
     _belief_state.anchor_id = old_anchor

-# [/DEF:BeliefScope:Function]
+# [/DEF:belief_scope:Function]
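The hunks above show only fragments of belief_scope (the entry-log comment and the anchor restore). A minimal sketch consistent with those fragments, assuming the thread-local `_belief_state` that BeliefFormatter reads; the exact log wording and the failure path are assumptions, with the `[Coherence:Failed]` tag borrowed from the MigrationEngine hunk further down:

    from contextlib import contextmanager
    import logging
    import threading

    logger = logging.getLogger(__name__)
    _belief_state = threading.local()
    _enable_belief_state = True

    @contextmanager
    def belief_scope(anchor_id: str, message: str = ""):
        # Save the enclosing anchor so scopes can nest, then install ours.
        old_anchor = getattr(_belief_state, "anchor_id", None)
        _belief_state.anchor_id = anchor_id
        if _enable_belief_state:
            logger.info(f"[{anchor_id}][Entry] {message}".rstrip())
        try:
            yield
        except Exception as e:
            # Assumed failure wording, mirroring the [Coherence:Failed] tag seen elsewhere.
            logger.error(f"[{anchor_id}][Coherence:Failed] {e}")
            raise
        finally:
            # Restore old anchor (matches the fragment in the hunk above)
            _belief_state.anchor_id = old_anchor

With BeliefFormatter installed, a record emitted inside a scope renders as `[anchor_id][Action] [asctime][LEVEL][name] message`, since format() prefixes the already-formatted line.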

-# [DEF:ConfigureLogger:Function]
+# [DEF:configure_logger:Function]
 # @PURPOSE: Configures the logger with the provided logging settings.
 # @PRE: config is a valid LoggingConfig instance.
 # @POST: Logger level, handlers, and belief state flag are updated.
@@ -115,7 +126,7 @@ def configure_logger(config):
         handler.setFormatter(BeliefFormatter(
             '[%(asctime)s][%(levelname)s][%(name)s] %(message)s'
         ))
-# [/DEF:ConfigureLogger:Function]
+# [/DEF:configure_logger:Function]

 # [DEF:WebSocketLogHandler:Class]
 # @SEMANTICS: logging, handler, websocket, buffer
@@ -125,38 +136,59 @@ class WebSocketLogHandler(logging.Handler):
     A logging handler that stores log records and can be extended to send them
     over WebSockets.
     """
     # [DEF:__init__:Function]
     # @PURPOSE: Initializes the handler with a fixed-capacity buffer.
     # @PRE: capacity is an integer.
     # @POST: Instance initialized with empty deque.
     # @PARAM: capacity (int) - Maximum number of logs to keep in memory.
     def __init__(self, capacity: int = 1000):
-        super().__init__()
-        self.log_buffer: deque[LogEntry] = deque(maxlen=capacity)
+        with belief_scope("WebSocketLogHandler.__init__"):
+            super().__init__()
+            self.log_buffer: deque[LogEntry] = deque(maxlen=capacity)
             # In a real implementation, you'd have a way to manage active WebSocket connections
             # e.g., self.active_connections: Set[WebSocket] = set()
     # [/DEF:__init__:Function]

     # [DEF:emit:Function]
     # @PURPOSE: Captures a log record, formats it, and stores it in the buffer.
     # @PRE: record is a logging.LogRecord.
     # @POST: Log is added to the log_buffer.
     # @PARAM: record (logging.LogRecord) - The log record to emit.
     def emit(self, record: logging.LogRecord):
-        try:
-            log_entry = LogEntry(
-                level=record.levelname,
-                message=self.format(record),
-                context={
-                    "name": record.name,
-                    "pathname": record.pathname,
-                    "lineno": record.lineno,
-                    "funcName": record.funcName,
-                    "process": record.process,
-                    "thread": record.thread,
-                }
-            )
-            self.log_buffer.append(log_entry)
-            # Here you would typically send the log_entry to all active WebSocket connections
-            # for real-time streaming to the frontend.
-            # Example: for ws in self.active_connections: await ws.send_json(log_entry.dict())
-        except Exception:
-            self.handleError(record)
+        with belief_scope("WebSocketLogHandler.emit"):
+            try:
+                log_entry = LogEntry(
+                    level=record.levelname,
+                    message=self.format(record),
+                    context={
+                        "name": record.name,
+                        "pathname": record.pathname,
+                        "lineno": record.lineno,
+                        "funcName": record.funcName,
+                        "process": record.process,
+                        "thread": record.thread,
+                    }
+                )
+                self.log_buffer.append(log_entry)
+                # Here you would typically send the log_entry to all active WebSocket connections
+                # for real-time streaming to the frontend.
+                # Example: for ws in self.active_connections: await ws.send_json(log_entry.dict())
+            except Exception:
+                self.handleError(record)
     # [/DEF:emit:Function]

     # [DEF:get_recent_logs:Function]
     # @PURPOSE: Returns a list of recent log entries from the buffer.
     # @PRE: None.
     # @POST: Returns list of LogEntry objects.
     # @RETURN: List[LogEntry] - List of buffered log entries.
     def get_recent_logs(self) -> List[LogEntry]:
-        """
-        Returns a list of recent log entries from the buffer.
-        """
-        return list(self.log_buffer)
+        with belief_scope("WebSocketLogHandler.get_recent_logs"):
+            """
+            Returns a list of recent log entries from the buffer.
+            """
+            return list(self.log_buffer)
     # [/DEF:get_recent_logs:Function]

 # [/DEF:WebSocketLogHandler:Class]
@@ -23,12 +23,14 @@ import yaml
 # @PURPOSE: Engine for transforming Superset export ZIPs.
 class MigrationEngine:

-    # [DEF:MigrationEngine.transform_zip:Function]
+    # [DEF:transform_zip:Function]
     # @PURPOSE: Extracts ZIP, replaces database UUIDs in YAMLs, and re-packages.
     # @PARAM: zip_path (str) - Path to the source ZIP file.
     # @PARAM: output_path (str) - Path where the transformed ZIP will be saved.
     # @PARAM: db_mapping (Dict[str, str]) - Mapping of source UUID to target UUID.
     # @PARAM: strip_databases (bool) - Whether to remove the databases directory from the archive.
+    # @PRE: zip_path must point to a valid Superset export archive.
+    # @POST: Transformed archive is saved to output_path.
     # @RETURN: bool - True if successful.
     def transform_zip(self, zip_path: str, output_path: str, db_mapping: Dict[str, str], strip_databases: bool = True) -> bool:
         """
@@ -73,10 +75,14 @@ class MigrationEngine:
         except Exception as e:
             logger.error(f"[MigrationEngine.transform_zip][Coherence:Failed] Error transforming ZIP: {e}")
             return False
-    # [/DEF:MigrationEngine.transform_zip:Function]
+    # [/DEF:transform_zip:Function]

-    # [DEF:MigrationEngine._transform_yaml:Function]
+    # [DEF:_transform_yaml:Function]
     # @PURPOSE: Replaces database_uuid in a single YAML file.
     # @PARAM: file_path (Path) - Path to the YAML file.
     # @PARAM: db_mapping (Dict[str, str]) - UUID mapping dictionary.
+    # @PRE: file_path must exist and be readable.
+    # @POST: File is modified in-place if source UUID matches mapping.
     def _transform_yaml(self, file_path: Path, db_mapping: Dict[str, str]):
         with open(file_path, 'r') as f:
             data = yaml.safe_load(f)
@@ -91,7 +97,7 @@ class MigrationEngine:
             data['database_uuid'] = db_mapping[source_uuid]
             with open(file_path, 'w') as f:
                 yaml.dump(data, f)
-    # [/DEF:MigrationEngine._transform_yaml:Function]
+    # [/DEF:_transform_yaml:Function]

 # [/DEF:MigrationEngine:Class]
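A usage sketch for the transform API above, exercising the full signature shown in the hunk; the UUIDs and file names are placeholders, not values from this project:

    engine = MigrationEngine()
    db_mapping = {
        # source database UUID -> target database UUID
        "11111111-1111-1111-1111-111111111111": "22222222-2222-2222-2222-222222222222",
    }
    ok = engine.transform_zip("export_dev.zip", "export_prod.zip", db_mapping, strip_databases=True)
    # ok is False if any exception was caught and logged with [Coherence:Failed]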
@@ -1,5 +1,6 @@
 from abc import ABC, abstractmethod
 from typing import Dict, Any
+from .logger import belief_scope

 from pydantic import BaseModel, Field

@@ -17,43 +18,86 @@ class PluginBase(ABC):

     @property
     @abstractmethod
+    # [DEF:id:Function]
+    # @PURPOSE: Returns the unique identifier for the plugin.
+    # @PRE: Plugin instance exists.
+    # @POST: Returns string ID.
+    # @RETURN: str - Plugin ID.
     def id(self) -> str:
         """A unique identifier for the plugin."""
-        pass
+        with belief_scope("id"):
+            pass
+    # [/DEF:id:Function]

     @property
     @abstractmethod
+    # [DEF:name:Function]
+    # @PURPOSE: Returns the human-readable name of the plugin.
+    # @PRE: Plugin instance exists.
+    # @POST: Returns string name.
+    # @RETURN: str - Plugin name.
     def name(self) -> str:
         """A human-readable name for the plugin."""
-        pass
+        with belief_scope("name"):
+            pass
+    # [/DEF:name:Function]

     @property
     @abstractmethod
+    # [DEF:description:Function]
+    # @PURPOSE: Returns a brief description of the plugin.
+    # @PRE: Plugin instance exists.
+    # @POST: Returns string description.
+    # @RETURN: str - Plugin description.
     def description(self) -> str:
         """A brief description of what the plugin does."""
-        pass
+        with belief_scope("description"):
+            pass
+    # [/DEF:description:Function]

     @property
     @abstractmethod
+    # [DEF:version:Function]
+    # @PURPOSE: Returns the version of the plugin.
+    # @PRE: Plugin instance exists.
+    # @POST: Returns string version.
+    # @RETURN: str - Plugin version.
     def version(self) -> str:
         """The version of the plugin."""
-        pass
+        with belief_scope("version"):
+            pass
+    # [/DEF:version:Function]

     @abstractmethod
+    # [DEF:get_schema:Function]
+    # @PURPOSE: Returns the JSON schema for the plugin's input parameters.
+    # @PRE: Plugin instance exists.
+    # @POST: Returns dict schema.
+    # @RETURN: Dict[str, Any] - JSON schema.
     def get_schema(self) -> Dict[str, Any]:
         """
         Returns the JSON schema for the plugin's input parameters.
         This schema will be used to generate the frontend form.
         """
-        pass
+        with belief_scope("get_schema"):
+            pass
+    # [/DEF:get_schema:Function]

     @abstractmethod
+    # [DEF:execute:Function]
+    # @PURPOSE: Executes the plugin's core logic.
+    # @PARAM: params (Dict[str, Any]) - Validated input parameters.
+    # @PRE: params must be a dictionary.
+    # @POST: Plugin execution is completed.
     async def execute(self, params: Dict[str, Any]):
+        with belief_scope("execute"):
+            pass
-        """
-        Executes the plugin's logic.
-        The `params` argument will be validated against the schema returned by `get_schema()`.
-        """
-        pass
+    # [/DEF:execute:Function]
 # [/DEF:PluginBase:Class]

 # [DEF:PluginConfig:Class]
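To make the contract concrete, here is a hypothetical plugin written against this ABC; the class name, ID, schema fields, and print call are invented for illustration and reuse the imports from plugin_base.py above:

    class EchoPlugin(PluginBase):
        @property
        def id(self) -> str:
            return "echo"

        @property
        def name(self) -> str:
            return "Echo"

        @property
        def description(self) -> str:
            return "Echoes its parameters back into the log."

        @property
        def version(self) -> str:
            return "0.1.0"

        def get_schema(self) -> Dict[str, Any]:
            # JSON schema used to generate the frontend form
            return {"type": "object", "properties": {"text": {"type": "string"}}}

        async def execute(self, params: Dict[str, Any]):
            with belief_scope("EchoPlugin.execute"):
                print(params.get("text", ""))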
@@ -4,6 +4,7 @@ import sys # Added this line
 from typing import Dict, Type, List, Optional
 from .plugin_base import PluginBase, PluginConfig
 from jsonschema import validate
+from .logger import belief_scope

 # [DEF:PluginLoader:Class]
 # @SEMANTICS: plugin, loader, dynamic, import
@@ -16,22 +17,28 @@ class PluginLoader:
     that inherit from PluginBase.
     """

-    # [DEF:PluginLoader.__init__:Function]
+    # [DEF:__init__:Function]
     # @PURPOSE: Initializes the PluginLoader with a directory to scan.
     # @PRE: plugin_dir is a valid directory path.
     # @POST: Plugins are loaded and registered.
     # @PARAM: plugin_dir (str) - The directory containing plugin modules.
     def __init__(self, plugin_dir: str):
-        self.plugin_dir = plugin_dir
-        self._plugins: Dict[str, PluginBase] = {}
-        self._plugin_configs: Dict[str, PluginConfig] = {}
-        self._load_plugins()
-    # [/DEF:PluginLoader.__init__:Function]
+        with belief_scope("__init__"):
+            self.plugin_dir = plugin_dir
+            self._plugins: Dict[str, PluginBase] = {}
+            self._plugin_configs: Dict[str, PluginConfig] = {}
+            self._load_plugins()
+    # [/DEF:__init__:Function]

-    # [DEF:PluginLoader._load_plugins:Function]
+    # [DEF:_load_plugins:Function]
     # @PURPOSE: Scans the plugin directory and loads all valid plugins.
     # @PRE: plugin_dir exists or can be created.
     # @POST: _load_module is called for each .py file.
     def _load_plugins(self):
-        """
-        Scans the plugin directory, imports modules, and registers valid plugins.
-        """
+        with belief_scope("_load_plugins"):
+            """
+            Scans the plugin directory, imports modules, and registers valid plugins.
+            """
             if not os.path.exists(self.plugin_dir):
                 os.makedirs(self.plugin_dir)

@@ -47,16 +54,19 @@ class PluginLoader:
                 module_name = filename[:-3]
                 file_path = os.path.join(self.plugin_dir, filename)
                 self._load_module(module_name, file_path)
-    # [/DEF:PluginLoader._load_plugins:Function]
+    # [/DEF:_load_plugins:Function]

-    # [DEF:PluginLoader._load_module:Function]
+    # [DEF:_load_module:Function]
     # @PURPOSE: Loads a single Python module and discovers PluginBase implementations.
     # @PRE: module_name and file_path are valid.
     # @POST: Plugin classes are instantiated and registered.
     # @PARAM: module_name (str) - The name of the module.
     # @PARAM: file_path (str) - The path to the module file.
     def _load_module(self, module_name: str, file_path: str):
-        """
-        Loads a single Python module and extracts PluginBase subclasses.
-        """
+        with belief_scope("_load_module"):
+            """
+            Loads a single Python module and extracts PluginBase subclasses.
+            """
             # Try to determine the correct package prefix based on how the app is running
             # For standalone execution, we need to handle the import differently
             if __name__ == "__main__" or "test" in __name__:
@@ -94,15 +104,18 @@ class PluginLoader:
                         self._register_plugin(plugin_instance)
                 except Exception as e:
                     print(f"Error instantiating plugin {attribute_name} in {module_name}: {e}") # Replace with proper logging
-    # [/DEF:PluginLoader._load_module:Function]
+    # [/DEF:_load_module:Function]

-    # [DEF:PluginLoader._register_plugin:Function]
+    # [DEF:_register_plugin:Function]
     # @PURPOSE: Registers a PluginBase instance and its configuration.
     # @PRE: plugin_instance is a valid implementation of PluginBase.
     # @POST: Plugin is added to _plugins and _plugin_configs.
     # @PARAM: plugin_instance (PluginBase) - The plugin instance to register.
     def _register_plugin(self, plugin_instance: PluginBase):
-        """
-        Registers a valid plugin instance.
-        """
+        with belief_scope("_register_plugin"):
+            """
+            Registers a valid plugin instance.
+            """
             plugin_id = plugin_instance.id
             if plugin_id in self._plugins:
                 print(f"Warning: Duplicate plugin ID '{plugin_id}' found. Skipping.") # Replace with proper logging
@@ -131,39 +144,48 @@ class PluginLoader:
                 except Exception as e:
                     from ..core.logger import logger
                     logger.error(f"Error validating plugin '{plugin_instance.name}' (ID: {plugin_id}): {e}")
-    # [/DEF:PluginLoader._register_plugin:Function]
+    # [/DEF:_register_plugin:Function]

-    # [DEF:PluginLoader.get_plugin:Function]
+    # [DEF:get_plugin:Function]
     # @PURPOSE: Retrieves a loaded plugin instance by its ID.
     # @PRE: plugin_id is a string.
     # @POST: Returns plugin instance or None.
     # @PARAM: plugin_id (str) - The unique identifier of the plugin.
     # @RETURN: Optional[PluginBase] - The plugin instance if found, otherwise None.
     def get_plugin(self, plugin_id: str) -> Optional[PluginBase]:
-        """
-        Returns a loaded plugin instance by its ID.
-        """
+        with belief_scope("get_plugin"):
+            """
+            Returns a loaded plugin instance by its ID.
+            """
             return self._plugins.get(plugin_id)
-    # [/DEF:PluginLoader.get_plugin:Function]
+    # [/DEF:get_plugin:Function]

-    # [DEF:PluginLoader.get_all_plugin_configs:Function]
+    # [DEF:get_all_plugin_configs:Function]
     # @PURPOSE: Returns a list of all registered plugin configurations.
     # @PRE: None.
     # @POST: Returns list of all PluginConfig objects.
     # @RETURN: List[PluginConfig] - A list of plugin configurations.
     def get_all_plugin_configs(self) -> List[PluginConfig]:
-        """
-        Returns a list of all loaded plugin configurations.
-        """
+        with belief_scope("get_all_plugin_configs"):
+            """
+            Returns a list of all loaded plugin configurations.
+            """
             return list(self._plugin_configs.values())
-    # [/DEF:PluginLoader.get_all_plugin_configs:Function]
+    # [/DEF:get_all_plugin_configs:Function]

-    # [DEF:PluginLoader.has_plugin:Function]
+    # [DEF:has_plugin:Function]
     # @PURPOSE: Checks if a plugin with the given ID is registered.
     # @PRE: plugin_id is a string.
     # @POST: Returns True if plugin exists.
     # @PARAM: plugin_id (str) - The unique identifier of the plugin.
     # @RETURN: bool - True if the plugin is registered, False otherwise.
     def has_plugin(self, plugin_id: str) -> bool:
-        """
-        Checks if a plugin with the given ID is loaded.
-        """
+        with belief_scope("has_plugin"):
+            """
+            Checks if a plugin with the given ID is loaded.
+            """
             return plugin_id in self._plugins
-    # [/DEF:PluginLoader.has_plugin:Function]
+    # [/DEF:has_plugin:Function]

 # [/DEF:PluginLoader:Class]
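Loader usage, as implied by the API above; the directory name and plugin ID are illustrative:

    loader = PluginLoader("backend/plugins")  # scans *.py files and registers PluginBase subclasses
    for cfg in loader.get_all_plugin_configs():
        print(cfg)
    if loader.has_plugin("echo"):
        plugin = loader.get_plugin("echo")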
@@ -17,34 +17,45 @@ import asyncio
 # @SEMANTICS: scheduler, service, apscheduler
 # @PURPOSE: Provides a service to manage scheduled backup tasks.
 class SchedulerService:
+    # [DEF:__init__:Function]
+    # @PURPOSE: Initializes the scheduler service with task and config managers.
+    # @PRE: task_manager and config_manager must be provided.
+    # @POST: Scheduler instance is created but not started.
     def __init__(self, task_manager, config_manager: ConfigManager):
+        with belief_scope("SchedulerService.__init__"):
             self.task_manager = task_manager
             self.config_manager = config_manager
             self.scheduler = BackgroundScheduler()
             self.loop = asyncio.get_event_loop()
+    # [/DEF:__init__:Function]

-    # [DEF:SchedulerService.start:Function]
+    # [DEF:start:Function]
     # @PURPOSE: Starts the background scheduler and loads initial schedules.
     # @PRE: Scheduler should be initialized.
     # @POST: Scheduler is running and schedules are loaded.
     def start(self):
+        with belief_scope("SchedulerService.start"):
             if not self.scheduler.running:
                 self.scheduler.start()
                 logger.info("Scheduler started.")
             self.load_schedules()
-    # [/DEF:SchedulerService.start:Function]
+    # [/DEF:start:Function]

-    # [DEF:SchedulerService.stop:Function]
+    # [DEF:stop:Function]
     # @PURPOSE: Stops the background scheduler.
     # @PRE: Scheduler should be running.
     # @POST: Scheduler is shut down.
     def stop(self):
+        with belief_scope("SchedulerService.stop"):
             if self.scheduler.running:
                 self.scheduler.shutdown()
                 logger.info("Scheduler stopped.")
-    # [/DEF:SchedulerService.stop:Function]
+    # [/DEF:stop:Function]

-    # [DEF:SchedulerService.load_schedules:Function]
+    # [DEF:load_schedules:Function]
     # @PURPOSE: Loads backup schedules from configuration and registers them.
     # @PRE: config_manager must have valid configuration.
     # @POST: All enabled backup jobs are added to the scheduler.
     def load_schedules(self):
+        with belief_scope("SchedulerService.load_schedules"):
             # Clear existing jobs
@@ -54,12 +65,14 @@ class SchedulerService:
             for env in config.environments:
                 if env.backup_schedule and env.backup_schedule.enabled:
                     self.add_backup_job(env.id, env.backup_schedule.cron_expression)
-    # [/DEF:SchedulerService.load_schedules:Function]
+    # [/DEF:load_schedules:Function]

-    # [DEF:SchedulerService.add_backup_job:Function]
+    # [DEF:add_backup_job:Function]
     # @PURPOSE: Adds a scheduled backup job for an environment.
-    # @PARAM: env_id (str) - The ID of the environment.
-    # @PARAM: cron_expression (str) - The cron expression for the schedule.
+    # @PRE: env_id and cron_expression must be valid strings.
+    # @POST: A new job is added to the scheduler or replaced if it already exists.
+    # @PARAM: env_id (str) - The ID of the environment.
+    # @PARAM: cron_expression (str) - The cron expression for the schedule.
     def add_backup_job(self, env_id: str, cron_expression: str):
+        with belief_scope("SchedulerService.add_backup_job", f"env_id={env_id}, cron={cron_expression}"):
             job_id = f"backup_{env_id}"
@@ -74,11 +87,13 @@ class SchedulerService:
                 logger.info(f"Scheduled backup job added for environment {env_id}: {cron_expression}")
             except Exception as e:
                 logger.error(f"Failed to add backup job for environment {env_id}: {e}")
-    # [/DEF:SchedulerService.add_backup_job:Function]
+    # [/DEF:add_backup_job:Function]

-    # [DEF:SchedulerService._trigger_backup:Function]
+    # [DEF:_trigger_backup:Function]
     # @PURPOSE: Triggered by the scheduler to start a backup task.
-    # @PARAM: env_id (str) - The ID of the environment.
+    # @PRE: env_id must be a valid environment ID.
+    # @POST: A new backup task is created in the task manager if not already running.
+    # @PARAM: env_id (str) - The ID of the environment.
     def _trigger_backup(self, env_id: str):
+        with belief_scope("SchedulerService._trigger_backup", f"env_id={env_id}"):
             logger.info(f"Triggering scheduled backup for environment {env_id}")
@@ -98,7 +113,7 @@ class SchedulerService:
                 self.task_manager.create_task("superset-backup", {"environment_id": env_id}),
                 self.loop
             )
-    # [/DEF:SchedulerService._trigger_backup:Function]
+    # [/DEF:_trigger_backup:Function]

 # [/DEF:SchedulerService:Class]
 # [/DEF:SchedulerModule:Module]
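The truncated call at the end of _trigger_backup (a coroutine followed by self.loop) is consistent with asyncio.run_coroutine_threadsafe, the standard way to hand a coroutine from APScheduler's worker thread to an asyncio loop; whether that is the exact call here is an assumption. A sketch of the pattern, with the coroutine taken from the hunk:

    import asyncio

    def trigger_from_scheduler_thread(task_manager, loop: asyncio.AbstractEventLoop, env_id: str):
        # Runs on the BackgroundScheduler thread; schedules the coroutine on the
        # event loop captured in __init__ and returns a Future immediately.
        asyncio.run_coroutine_threadsafe(
            task_manager.create_task("superset-backup", {"environment_id": env_id}),
            loop,
        )
        # Calling .result() on the returned future would block the scheduler
        # thread; fire-and-forget is typical for this kind of trigger.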
@@ -9,6 +9,7 @@

 # [SECTION: IMPORTS]
 from typing import List, Dict, Optional, Tuple
+from backend.src.core.logger import belief_scope
 from superset_tool.client import SupersetClient as BaseSupersetClient
 from superset_tool.models import SupersetConfig
 # [/SECTION]
@@ -17,88 +18,101 @@ from superset_tool.models import SupersetConfig
 # @PURPOSE: Extended SupersetClient for migration-specific operations.
 class SupersetClient(BaseSupersetClient):

-    # [DEF:SupersetClient.get_databases_summary:Function]
+    # [DEF:get_databases_summary:Function]
     # @PURPOSE: Fetch a summary of databases including uuid, name, and engine.
-    # @POST: Returns a list of database dictionaries with 'engine' field.
-    # @RETURN: List[Dict] - Summary of databases.
+    # @PRE: self.network must be initialized and authenticated.
+    # @POST: Returns a list of database dictionaries with 'engine' field.
+    # @RETURN: List[Dict] - Summary of databases.
     def get_databases_summary(self) -> List[Dict]:
-        """
-        Fetch a summary of databases including uuid, name, and engine.
-        """
-        query = {
-            "columns": ["uuid", "database_name", "backend"]
-        }
-        _, databases = self.get_databases(query=query)
-
-        # Map 'backend' to 'engine' for consistency with contracts
-        for db in databases:
-            db['engine'] = db.pop('backend', None)
+        with belief_scope("SupersetClient.get_databases_summary"):
+            """
+            Fetch a summary of databases including uuid, name, and engine.
+            """
+            query = {
+                "columns": ["uuid", "database_name", "backend"]
+            }
+            _, databases = self.get_databases(query=query)

-        return databases
-    # [/DEF:SupersetClient.get_databases_summary:Function]
+            # Map 'backend' to 'engine' for consistency with contracts
+            for db in databases:
+                db['engine'] = db.pop('backend', None)

+            return databases
+    # [/DEF:get_databases_summary:Function]

-    # [DEF:SupersetClient.get_database_by_uuid:Function]
+    # [DEF:get_database_by_uuid:Function]
     # @PURPOSE: Find a database by its UUID.
-    # @PARAM: db_uuid (str) - The UUID of the database.
-    # @RETURN: Optional[Dict] - Database info if found, else None.
+    # @PRE: db_uuid must be a string.
+    # @POST: Returns database metadata if found.
+    # @PARAM: db_uuid (str) - The UUID of the database.
+    # @RETURN: Optional[Dict] - Database info if found, else None.
     def get_database_by_uuid(self, db_uuid: str) -> Optional[Dict]:
-        """
-        Find a database by its UUID.
-        """
-        query = {
-            "filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
-        }
-        _, databases = self.get_databases(query=query)
-        return databases[0] if databases else None
-    # [/DEF:SupersetClient.get_database_by_uuid:Function]
+        with belief_scope("SupersetClient.get_database_by_uuid", f"uuid={db_uuid}"):
+            """
+            Find a database by its UUID.
+            """
+            query = {
+                "filters": [{"col": "uuid", "op": "eq", "value": db_uuid}]
+            }
+            _, databases = self.get_databases(query=query)
+            return databases[0] if databases else None
+    # [/DEF:get_database_by_uuid:Function]

-    # [DEF:SupersetClient.get_dashboards_summary:Function]
+    # [DEF:get_dashboards_summary:Function]
     # @PURPOSE: Fetches dashboard metadata optimized for the grid.
-    # @POST: Returns a list of dashboard dictionaries.
-    # @RETURN: List[Dict]
+    # @PRE: self.network must be authenticated.
+    # @POST: Returns a list of dashboard dictionaries mapped to the grid schema.
+    # @RETURN: List[Dict]
     def get_dashboards_summary(self) -> List[Dict]:
-        """
-        Fetches dashboard metadata optimized for the grid.
-        Returns a list of dictionaries mapped to DashboardMetadata fields.
-        """
-        query = {
-            "columns": ["id", "dashboard_title", "changed_on_utc", "published"]
-        }
-        _, dashboards = self.get_dashboards(query=query)
+        with belief_scope("SupersetClient.get_dashboards_summary"):
+            """
+            Fetches dashboard metadata optimized for the grid.
+            Returns a list of dictionaries mapped to DashboardMetadata fields.
+            """
+            query = {
+                "columns": ["id", "dashboard_title", "changed_on_utc", "published"]
+            }
+            _, dashboards = self.get_dashboards(query=query)

-        # Map fields to DashboardMetadata schema
-        result = []
-        for dash in dashboards:
-            result.append({
-                "id": dash.get("id"),
-                "title": dash.get("dashboard_title"),
-                "last_modified": dash.get("changed_on_utc"),
-                "status": "published" if dash.get("published") else "draft"
-            })
-        return result
-    # [/DEF:SupersetClient.get_dashboards_summary:Function]
+            # Map fields to DashboardMetadata schema
+            result = []
+            for dash in dashboards:
+                result.append({
+                    "id": dash.get("id"),
+                    "title": dash.get("dashboard_title"),
+                    "last_modified": dash.get("changed_on_utc"),
+                    "status": "published" if dash.get("published") else "draft"
+                })
+            return result
+    # [/DEF:get_dashboards_summary:Function]

-    # [DEF:SupersetClient.get_dataset:Function]
+    # [DEF:get_dataset:Function]
     # @PURPOSE: Fetch full dataset structure including columns and metrics.
-    # @PARAM: dataset_id (int) - The ID of the dataset.
-    # @RETURN: Dict - The dataset metadata.
+    # @PRE: dataset_id must be a valid integer.
+    # @POST: Returns full dataset metadata from Superset API.
+    # @PARAM: dataset_id (int) - The ID of the dataset.
+    # @RETURN: Dict - The dataset metadata.
     def get_dataset(self, dataset_id: int) -> Dict:
-        """
-        Fetch full dataset structure.
-        """
-        return self.network.get(f"/api/v1/dataset/{dataset_id}").json()
-    # [/DEF:SupersetClient.get_dataset:Function]
+        with belief_scope("SupersetClient.get_dataset", f"id={dataset_id}"):
+            """
+            Fetch full dataset structure.
+            """
+            return self.network.get(f"/api/v1/dataset/{dataset_id}").json()
+    # [/DEF:get_dataset:Function]

-    # [DEF:SupersetClient.update_dataset:Function]
+    # [DEF:update_dataset:Function]
     # @PURPOSE: Update dataset metadata.
-    # @PARAM: dataset_id (int) - The ID of the dataset.
-    # @PARAM: data (Dict) - The payload for update.
+    # @PRE: dataset_id must be valid, data must be a valid Superset dataset payload.
+    # @POST: Dataset is updated in Superset.
+    # @PARAM: dataset_id (int) - The ID of the dataset.
+    # @PARAM: data (Dict) - The payload for update.
     def update_dataset(self, dataset_id: int, data: Dict):
-        """
-        Update dataset metadata.
-        """
-        self.network.put(f"/api/v1/dataset/{dataset_id}", json=data)
-    # [/DEF:SupersetClient.update_dataset:Function]
+        with belief_scope("SupersetClient.update_dataset", f"id={dataset_id}"):
+            """
+            Update dataset metadata.
+            """
+            self.network.put(f"/api/v1/dataset/{dataset_id}", json=data)
+    # [/DEF:update_dataset:Function]

 # [/DEF:SupersetClient:Class]
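These helpers feed the migration flow: the database summaries supply the UUIDs that MigrationEngine.transform_zip remaps. A hedged wiring sketch, assuming databases are matched across environments by name (the matching rule and function name are assumptions, not shown in this commit):

    def build_db_mapping(src: SupersetClient, dst: SupersetClient) -> Dict[str, str]:
        # Assumes database names are unique within each environment.
        src_dbs = {d["database_name"]: d["uuid"] for d in src.get_databases_summary()}
        dst_dbs = {d["database_name"]: d["uuid"] for d in dst.get_databases_summary()}
        # Map source UUID -> target UUID for names present on both sides.
        return {src_dbs[n]: dst_dbs[n] for n in src_dbs.keys() & dst_dbs.keys()}

    # The result plugs straight into the engine:
    # MigrationEngine().transform_zip("export.zip", "export_transformed.zip", build_db_mapping(src, dst))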
@@ -186,17 +186,23 @@ class TaskManager:

     # [DEF:get_task:Function]
     # @PURPOSE: Retrieves a task by its ID.
     # @PRE: task_id is a string.
     # @POST: Returns Task object or None.
     # @PARAM: task_id (str) - ID of the task.
     # @RETURN: Optional[Task] - The task or None.
     def get_task(self, task_id: str) -> Optional[Task]:
-        return self.tasks.get(task_id)
+        with belief_scope("TaskManager.get_task", f"task_id={task_id}"):
+            return self.tasks.get(task_id)
     # [/DEF:get_task:Function]

     # [DEF:get_all_tasks:Function]
     # @PURPOSE: Retrieves all registered tasks.
     # @PRE: None.
     # @POST: Returns list of all Task objects.
     # @RETURN: List[Task] - All tasks.
     def get_all_tasks(self) -> List[Task]:
-        return list(self.tasks.values())
+        with belief_scope("TaskManager.get_all_tasks"):
+            return list(self.tasks.values())
     # [/DEF:get_all_tasks:Function]

     # [DEF:get_tasks:Function]
@@ -208,7 +214,8 @@ class TaskManager:
     # @PARAM: status (Optional[TaskStatus]) - Filter by task status.
     # @RETURN: List[Task] - List of tasks matching criteria.
     def get_tasks(self, limit: int = 10, offset: int = 0, status: Optional[TaskStatus] = None) -> List[Task]:
-        tasks = list(self.tasks.values())
+        with belief_scope("TaskManager.get_tasks"):
+            tasks = list(self.tasks.values())
             if status:
                 tasks = [t for t in tasks if t.status == status]
             # Sort by start_time descending (most recent first)
@@ -218,11 +225,14 @@ class TaskManager:

     # [DEF:get_task_logs:Function]
     # @PURPOSE: Retrieves logs for a specific task.
     # @PRE: task_id is a string.
     # @POST: Returns list of LogEntry objects.
     # @PARAM: task_id (str) - ID of the task.
     # @RETURN: List[LogEntry] - List of log entries.
     def get_task_logs(self, task_id: str) -> List[LogEntry]:
-        task = self.tasks.get(task_id)
-        return task.logs if task else []
+        with belief_scope("TaskManager.get_task_logs", f"task_id={task_id}"):
+            task = self.tasks.get(task_id)
+            return task.logs if task else []
     # [/DEF:get_task_logs:Function]

     # [DEF:_add_log:Function]
@@ -234,51 +244,61 @@ class TaskManager:
     # @PARAM: message (str) - Log message.
     # @PARAM: context (Optional[Dict]) - Log context.
     def _add_log(self, task_id: str, level: str, message: str, context: Optional[Dict[str, Any]] = None):
-        task = self.tasks.get(task_id)
-        if not task:
-            return
+        with belief_scope("TaskManager._add_log", f"task_id={task_id}"):
+            task = self.tasks.get(task_id)
+            if not task:
+                return

-        log_entry = LogEntry(level=level, message=message, context=context)
-        task.logs.append(log_entry)
-        self.persistence_service.persist_task(task)
+            log_entry = LogEntry(level=level, message=message, context=context)
+            task.logs.append(log_entry)
+            self.persistence_service.persist_task(task)

-        # Notify subscribers
-        if task_id in self.subscribers:
-            for queue in self.subscribers[task_id]:
-                self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
+            # Notify subscribers
+            if task_id in self.subscribers:
+                for queue in self.subscribers[task_id]:
+                    self.loop.call_soon_threadsafe(queue.put_nowait, log_entry)
     # [/DEF:_add_log:Function]

     # [DEF:subscribe_logs:Function]
     # @PURPOSE: Subscribes to real-time logs for a task.
     # @PRE: task_id is a string.
     # @POST: Returns an asyncio.Queue for log entries.
     # @PARAM: task_id (str) - ID of the task.
     # @RETURN: asyncio.Queue - Queue for log entries.
     async def subscribe_logs(self, task_id: str) -> asyncio.Queue:
-        queue = asyncio.Queue()
-        if task_id not in self.subscribers:
-            self.subscribers[task_id] = []
-        self.subscribers[task_id].append(queue)
-        return queue
+        with belief_scope("TaskManager.subscribe_logs", f"task_id={task_id}"):
+            queue = asyncio.Queue()
+            if task_id not in self.subscribers:
+                self.subscribers[task_id] = []
+            self.subscribers[task_id].append(queue)
+            return queue
     # [/DEF:subscribe_logs:Function]

     # [DEF:unsubscribe_logs:Function]
     # @PURPOSE: Unsubscribes from real-time logs for a task.
     # @PRE: task_id is a string, queue is asyncio.Queue.
     # @POST: Queue removed from subscribers.
     # @PARAM: task_id (str) - ID of the task.
     # @PARAM: queue (asyncio.Queue) - Queue to remove.
     def unsubscribe_logs(self, task_id: str, queue: asyncio.Queue):
-        if task_id in self.subscribers:
-            if queue in self.subscribers[task_id]:
-                self.subscribers[task_id].remove(queue)
-            if not self.subscribers[task_id]:
-                del self.subscribers[task_id]
+        with belief_scope("TaskManager.unsubscribe_logs", f"task_id={task_id}"):
+            if task_id in self.subscribers:
+                if queue in self.subscribers[task_id]:
+                    self.subscribers[task_id].remove(queue)
+                if not self.subscribers[task_id]:
+                    del self.subscribers[task_id]
     # [/DEF:unsubscribe_logs:Function]

     # [DEF:load_persisted_tasks:Function]
     # @PURPOSE: Load persisted tasks using persistence service.
     # @PRE: None.
     # @POST: Persisted tasks loaded into self.tasks.
     def load_persisted_tasks(self) -> None:
-        loaded_tasks = self.persistence_service.load_tasks(limit=100)
-        for task in loaded_tasks:
-            if task.id not in self.tasks:
-                self.tasks[task.id] = task
+        with belief_scope("TaskManager.load_persisted_tasks"):
+            loaded_tasks = self.persistence_service.load_tasks(limit=100)
+            for task in loaded_tasks:
+                if task.id not in self.tasks:
+                    self.tasks[task.id] = task
     # [/DEF:load_persisted_tasks:Function]

     # [DEF:await_input:Function]
@@ -334,6 +354,8 @@ class TaskManager:

     # [DEF:clear_tasks:Function]
     # @PURPOSE: Clears tasks based on status filter.
     # @PRE: status is Optional[TaskStatus].
     # @POST: Tasks matching filter (or all non-active) cleared from registry and database.
     # @PARAM: status (Optional[TaskStatus]) - Filter by task status.
     # @RETURN: int - Number of tasks cleared.
     def clear_tasks(self, status: Optional[TaskStatus] = None) -> int:
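The subscribe/unsubscribe pair plus call_soon_threadsafe in _add_log form a thread-safe pub-sub bridge: worker threads push LogEntry objects onto per-task asyncio queues. A hypothetical consumer, since the websocket endpoint itself is not part of this commit:

    async def stream_task_logs(task_manager, task_id: str):
        queue = await task_manager.subscribe_logs(task_id)
        try:
            while True:
                entry = await queue.get()  # LogEntry delivered via call_soon_threadsafe
                print(entry)               # e.g. forward to a websocket instead
        finally:
            task_manager.unsubscribe_logs(task_id, queue)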
@@ -26,12 +26,15 @@ class TaskPersistenceService:
     # @PRE: None.
     # @POST: Service is ready.
     def __init__(self):
-        # We use TasksSessionLocal from database.py
-        pass
+        with belief_scope("TaskPersistenceService.__init__"):
+            # We use TasksSessionLocal from database.py
+            pass
     # [/DEF:__init__:Function]

     # [DEF:persist_task:Function]
     # @PURPOSE: Persists or updates a single task in the database.
     # @PRE: isinstance(task, Task)
     # @POST: Task record created or updated in database.
     # @PARAM: task (Task) - The task object to persist.
     def persist_task(self, task: Task) -> None:
+        with belief_scope("TaskPersistenceService.persist_task", f"task_id={task.id}"):
@@ -75,14 +78,19 @@ class TaskPersistenceService:

     # [DEF:persist_tasks:Function]
     # @PURPOSE: Persists multiple tasks.
     # @PRE: isinstance(tasks, list)
     # @POST: All tasks in list are persisted.
     # @PARAM: tasks (List[Task]) - The list of tasks to persist.
     def persist_tasks(self, tasks: List[Task]) -> None:
-        for task in tasks:
-            self.persist_task(task)
+        with belief_scope("TaskPersistenceService.persist_tasks"):
+            for task in tasks:
+                self.persist_task(task)
     # [/DEF:persist_tasks:Function]

     # [DEF:load_tasks:Function]
     # @PURPOSE: Loads tasks from the database.
     # @PRE: limit is an integer.
     # @POST: Returns list of Task objects.
     # @PARAM: limit (int) - Max tasks to load.
     # @PARAM: status (Optional[TaskStatus]) - Filter by status.
     # @RETURN: List[Task] - The loaded tasks.
@@ -128,6 +136,8 @@ class TaskPersistenceService:

     # [DEF:delete_tasks:Function]
     # @PURPOSE: Deletes specific tasks from the database.
     # @PRE: task_ids is a list of strings.
     # @POST: Specified task records deleted from database.
     # @PARAM: task_ids (List[str]) - List of task IDs to delete.
     def delete_tasks(self, task_ids: List[str]) -> None:
         if not task_ids: