Fix task API stability and Playwright runtime in Docker

This commit is contained in:
2026-02-21 23:43:46 +03:00
parent 6ffdf5f8a4
commit f0c85e4c03
6 changed files with 75 additions and 18 deletions

View File

@@ -25,6 +25,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY backend/requirements.txt /app/backend/requirements.txt
RUN pip install --no-cache-dir -r /app/backend/requirements.txt
RUN python -m playwright install --with-deps chromium
COPY backend/ /app/backend/
COPY --from=frontend-build /app/frontend/build /app/frontend/build

View File

@@ -54,3 +54,4 @@ email-validator
openai
playwright
tenacity
Pillow

View File

@@ -11,7 +11,7 @@ import asyncio
import threading
import inspect
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timezone
from typing import Dict, Any, List, Optional
from .models import Task, TaskStatus, LogEntry, LogFilter, LogStats
@@ -329,8 +329,18 @@ class TaskManager:
tasks = [t for t in tasks if t.plugin_id in plugin_id_set] tasks = [t for t in tasks if t.plugin_id in plugin_id_set]
if completed_only: if completed_only:
tasks = [t for t in tasks if t.status in [TaskStatus.SUCCESS, TaskStatus.FAILED]] tasks = [t for t in tasks if t.status in [TaskStatus.SUCCESS, TaskStatus.FAILED]]
# Sort by start_time descending (most recent first) # Sort by started_at descending with tolerant handling of mixed tz-aware/naive values.
tasks.sort(key=lambda t: t.started_at or datetime.min, reverse=True) def sort_key(task: Task) -> float:
started_at = task.started_at
if started_at is None:
return float("-inf")
if not isinstance(started_at, datetime):
return float("-inf")
if started_at.tzinfo is None:
return started_at.replace(tzinfo=timezone.utc).timestamp()
return started_at.timestamp()
tasks.sort(key=sort_key, reverse=True)
return tasks[offset:offset + limit] return tasks[offset:offset + limit]
# [/DEF:get_tasks:Function] # [/DEF:get_tasks:Function]

View File

@@ -109,7 +109,8 @@ class Task(BaseModel):
params: Dict[str, Any] = Field(default_factory=dict) params: Dict[str, Any] = Field(default_factory=dict)
input_required: bool = False input_required: bool = False
input_request: Optional[Dict[str, Any]] = None input_request: Optional[Dict[str, Any]] = None
result: Optional[Dict[str, Any]] = None # Result payload can be dict/list/scalar depending on plugin and legacy records.
result: Optional[Any] = None
# [DEF:__init__:Function] # [DEF:__init__:Function]
# @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status. # @PURPOSE: Initializes the Task model and validates input_request for AWAITING_INPUT status.

View File

@@ -12,6 +12,7 @@ import json
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from ...models.task import TaskRecord, TaskLogRecord from ...models.task import TaskRecord, TaskLogRecord
from ...models.mapping import Environment
from ..database import TasksSessionLocal from ..database import TasksSessionLocal
from .models import Task, TaskStatus, LogEntry, TaskLog, LogFilter, LogStats from .models import Task, TaskStatus, LogEntry, TaskLog, LogFilter, LogStats
from ..logger import logger, belief_scope from ..logger import logger, belief_scope
@@ -21,6 +22,40 @@ from ..logger import logger, belief_scope
# @SEMANTICS: persistence, service, database, sqlalchemy # @SEMANTICS: persistence, service, database, sqlalchemy
# @PURPOSE: Provides methods to save and load tasks from the tasks.db database using SQLAlchemy. # @PURPOSE: Provides methods to save and load tasks from the tasks.db database using SQLAlchemy.
class TaskPersistenceService: class TaskPersistenceService:
@staticmethod
def _json_load_if_needed(value):
if value is None:
return None
if isinstance(value, (dict, list)):
return value
if isinstance(value, str):
stripped = value.strip()
if stripped == "" or stripped.lower() == "null":
return None
try:
return json.loads(stripped)
except json.JSONDecodeError:
return value
return value
@staticmethod
def _parse_datetime(value):
if value is None or isinstance(value, datetime):
return value
if isinstance(value, str):
try:
return datetime.fromisoformat(value)
except ValueError:
return None
return None
@staticmethod
def _resolve_environment_id(session: Session, env_id: Optional[str]) -> Optional[str]:
    """Return *env_id* only if a matching Environment row exists.

    Falsy ids and ids with no corresponding Environment record resolve
    to None, so stale identifiers carried in task params are dropped
    instead of being written to the tasks table.
    """
    if not env_id:
        return None
    match = (
        session.query(Environment.id)
        .filter(Environment.id == env_id)
        .first()
    )
    return env_id if match is not None else None
# [DEF:__init__:Function] # [DEF:__init__:Function]
# @PURPOSE: Initializes the persistence service. # @PURPOSE: Initializes the persistence service.
# @PRE: None. # @PRE: None.
@@ -48,7 +83,8 @@ class TaskPersistenceService:
record.type = task.plugin_id
record.status = task.status.value
raw_env_id = task.params.get("environment_id") or task.params.get("source_env_id")
record.environment_id = self._resolve_environment_id(session, raw_env_id)
record.started_at = task.started_at
record.finished_at = task.finished_at
@@ -123,21 +159,28 @@ class TaskPersistenceService:
for record in records: for record in records:
try: try:
logs = [] logs = []
if record.logs: logs_payload = self._json_load_if_needed(record.logs)
for log_data in record.logs: if isinstance(logs_payload, list):
# Handle timestamp conversion if it's a string for log_data in logs_payload:
if isinstance(log_data.get('timestamp'), str): if not isinstance(log_data, dict):
log_data['timestamp'] = datetime.fromisoformat(log_data['timestamp']) continue
log_data = dict(log_data)
log_data['timestamp'] = self._parse_datetime(log_data.get('timestamp')) or datetime.utcnow()
logs.append(LogEntry(**log_data)) logs.append(LogEntry(**log_data))
started_at = self._parse_datetime(record.started_at)
finished_at = self._parse_datetime(record.finished_at)
params = self._json_load_if_needed(record.params)
result = self._json_load_if_needed(record.result)
task = Task( task = Task(
id=record.id, id=record.id,
plugin_id=record.type, plugin_id=record.type,
status=TaskStatus(record.status), status=TaskStatus(record.status),
started_at=record.started_at, started_at=started_at,
finished_at=record.finished_at, finished_at=finished_at,
params=record.params or {}, params=params if isinstance(params, dict) else {},
result=record.result, result=result,
logs=logs logs=logs
) )
loaded_tasks.append(task) loaded_tasks.append(task)

View File

@@ -74,7 +74,8 @@ class DashboardValidationPlugin(PluginBase):
log.info(f"Executing {self.name} with params: {params}") log.info(f"Executing {self.name} with params: {params}")
dashboard_id = params.get("dashboard_id") dashboard_id_raw = params.get("dashboard_id")
dashboard_id = str(dashboard_id_raw) if dashboard_id_raw is not None else None
env_id = params.get("environment_id") env_id = params.get("environment_id")
provider_id = params.get("provider_id") provider_id = params.get("provider_id")