# [DEF:backend/src/plugins/llm_analysis/models.py:Module]
# @TIER: STANDARD
# @SEMANTICS: pydantic, models, llm
# @PURPOSE: Define Pydantic models for LLM Analysis plugin.
# @LAYER: Domain
from typing import List, Optional
from pydantic import BaseModel, Field
from datetime import datetime, timezone
from enum import Enum


# [DEF:LLMProviderType:Class]
# @PURPOSE: Enum for supported LLM providers.
class LLMProviderType(str, Enum):
    """Closed set of LLM provider backends this plugin can talk to."""

    OPENAI = "openai"
    OPENROUTER = "openrouter"
    KILO = "kilo"
# [/DEF:LLMProviderType:Class]


# [DEF:LLMProviderConfig:Class]
# @PURPOSE: Configuration for an LLM provider.
class LLMProviderConfig(BaseModel):
    """Connection and defaults for a single configured LLM provider."""

    # Persistence identifier; None until the record is stored.
    id: Optional[str] = None
    provider_type: LLMProviderType
    # Human-readable display name for this provider entry.
    name: str
    base_url: str
    # Optional because some gateways may not require a key — TODO confirm.
    api_key: Optional[str] = None
    # Model identifier used when a request does not specify one.
    default_model: str
    is_active: bool = True
# [/DEF:LLMProviderConfig:Class]


# [DEF:ValidationStatus:Class]
# @PURPOSE: Enum for dashboard validation status.
class ValidationStatus(str, Enum):
    """Overall outcome of a dashboard validation run."""

    PASS = "PASS"
    WARN = "WARN"
    FAIL = "FAIL"
# [/DEF:ValidationStatus:Class]


# [DEF:DetectedIssue:Class]
# @PURPOSE: Model for a single issue detected during validation.
class DetectedIssue(BaseModel):
    """One issue found while validating a dashboard."""

    # Severity reuses the run-level status scale (PASS/WARN/FAIL).
    severity: ValidationStatus
    message: str
    # Where in the dashboard the issue was found, if known.
    location: Optional[str] = None
# [/DEF:DetectedIssue:Class]


# [DEF:ValidationResult:Class]
# @PURPOSE: Model for dashboard validation result.
class ValidationResult(BaseModel):
    """Full result of validating one dashboard, including detected issues."""

    # Persistence identifier; None until the record is stored.
    id: Optional[str] = None
    dashboard_id: str
    # Timezone-aware UTC timestamp. datetime.utcnow is deprecated (3.12+)
    # and returns naive datetimes; use an aware value instead.
    timestamp: datetime = Field(default_factory=lambda: datetime.now(timezone.utc))
    status: ValidationStatus
    # Filesystem path to the captured screenshot, if one was taken.
    screenshot_path: Optional[str] = None
    issues: List[DetectedIssue]
    # Human-readable summary of the validation outcome.
    summary: str
    # Unprocessed LLM response, kept for debugging/audit.
    raw_response: Optional[str] = None
# [/DEF:ValidationResult:Class]
# [/DEF:backend/src/plugins/llm_analysis/models.py:Module]