db + docker

This commit is contained in:
2026-02-20 20:47:39 +03:00
parent af74841765
commit 0cf0ef25f1
18 changed files with 2711 additions and 588 deletions

View File

@@ -1,13 +1,14 @@
# [DEF:generate_semantic_map:Module]
#
# @TIER: CRITICAL
# @SEMANTICS: semantic_analysis, parser, map_generator, compliance_checker, tier_validation, svelte_props, data_flow, module_map
# @PURPOSE: Scans the codebase to generate a Semantic Map, Module Map, and Compliance Report based on the System Standard.
# @LAYER: DevOps/Tooling
# @INVARIANT: All DEF anchors must have matching closing anchors; TIER determines validation strictness.
# @RELATION: READS -> FileSystem
# @RELATION: PRODUCES -> semantics/semantic_map.json
# @RELATION: PRODUCES -> .ai/PROJECT_MAP.md
# @RELATION: PRODUCES -> .ai/MODULE_MAP.md
# @RELATION: PRODUCES -> semantics/reports/semantic_report_*.md
# [SECTION: IMPORTS]
@@ -83,6 +84,7 @@ IGNORE_FILES = {
}
OUTPUT_JSON = "semantics/semantic_map.json"
OUTPUT_COMPRESSED_MD = ".ai/PROJECT_MAP.md"
OUTPUT_MODULE_MAP_MD = ".ai/MODULE_MAP.md"
REPORTS_DIR = "semantics/reports"
# Tier-based mandatory tags
@@ -830,6 +832,7 @@ class SemanticMapGenerator:
self._generate_report()
self._generate_compressed_map()
self._generate_module_map()
# [/DEF:_generate_artifacts:Function]
# [DEF:_generate_report:Function]
@@ -990,6 +993,163 @@ class SemanticMapGenerator:
self._write_entity_md(f, child, level + 1)
# [/DEF:_write_entity_md:Function]
# [DEF:_generate_module_map:Function]
# @TIER: CRITICAL
# @PURPOSE: Generates a module-centric map grouping entities by directory structure.
# @PRE: Entities have been processed.
# @POST: Markdown module map is written to .ai/MODULE_MAP.md.
def _generate_module_map(self):
    """Write the module-centric map to OUTPUT_MODULE_MAP_MD.

    Flattens the entity tree, groups entities by their containing
    directory ("module"), then emits a Markdown document with summary
    statistics, a per-module hierarchy section, and a Mermaid graph of
    cross-module dependencies.
    """
    with belief_scope("_generate_module_map"):
        os.makedirs(os.path.dirname(OUTPUT_MODULE_MAP_MD), exist_ok=True)

        # [DEF:_get_module_path:Function]
        # @TIER: STANDARD
        # @PURPOSE: Extracts the module (directory) path from a file path.
        # @PRE: file_path is a valid relative path.
        # @POST: Returns a module path string; files at repo root map to 'root'.
        def _get_module_path(file_path: str) -> str:
            parts = file_path.replace(os.sep, '/').split('/')
            # Drop the filename component.
            return '/'.join(parts[:-1]) if len(parts) > 1 else 'root'
        # [/DEF:_get_module_path:Function]

        # [DEF:_collect_all_entities:Function]
        # @TIER: STANDARD
        # @PURPOSE: Flattens the entity tree into (module_path, entity) pairs.
        # @PRE: entity list is valid.
        # @POST: result contains every entity, depth-first.
        def _collect_all_entities(entities: List[SemanticEntity],
                                  result: List[Tuple[str, SemanticEntity]]):
            for e in entities:
                result.append((_get_module_path(e.file_path), e))
                _collect_all_entities(e.children, result)
        # [/DEF:_collect_all_entities:Function]

        all_entities: List[Tuple[str, SemanticEntity]] = []
        _collect_all_entities(self.entities, all_entities)

        # Group entities by module path.
        modules: Dict[str, Dict[str, Any]] = {}
        for module_path, entity in all_entities:
            data = modules.setdefault(module_path, {
                'entities': [],
                'files': set(),
                'layers': set(),
                'tiers': {'CRITICAL': 0, 'STANDARD': 0, 'TRIVIAL': 0},
                'relations': [],
            })
            data['entities'].append(entity)
            data['files'].add(entity.file_path)
            layer = entity.tags.get('LAYER')
            if layer:
                data['layers'].add(layer)
            tier = entity.get_tier().value
            # .get() fallback tolerates tier values outside the three presets.
            data['tiers'][tier] = data['tiers'].get(tier, 0) + 1
            data['relations'].extend(entity.relations)

        # [DEF:_write_module_entry:Function]
        # @TIER: STANDARD
        # @PURPOSE: Writes one module's hierarchy section to the open file.
        def _write_module_entry(f, module_path: str, data: Dict[str, Any]):
            depth = module_path.count('/')
            indent = " " * depth
            module_name = module_path.split('/')[-1] if module_path != 'root' else 'root'
            # NOTE(review): indented '###' lines are not headings in strict
            # Markdown; kept as-is since this file targets AI context, not
            # a renderer — confirm before changing.
            f.write(f"{indent}### 📁 `{module_name}/`\n\n")
            if data['layers']:
                layers_str = ", ".join(sorted(data['layers']))
                f.write(f"{indent}- 🏗️ **Layers:** {layers_str}\n")
            tiers_summary = [f"{name}: {count}"
                             for name, count in data['tiers'].items() if count > 0]
            if tiers_summary:
                f.write(f"{indent}- 📊 **Tiers:** {', '.join(tiers_summary)}\n")
            f.write(f"{indent}- 📄 **Files:** {len(data['files'])}\n")
            f.write(f"{indent}- 📦 **Entities:** {len(data['entities'])}\n")

            # Key entities: high-level kinds only, capped at 10.
            key_entities = [e for e in data['entities']
                            if e.type in ('Module', 'Class', 'Component', 'Store')]
            if key_entities:
                f.write(f"\n{indent}**Key Entities:**\n\n")
                # NOTE(review): the empty icon for Class looks like an emoji
                # lost in transit — confirm the intended glyph.
                icons = {'Module': "📦", 'Class': "", 'Component': "🧩"}
                for entity in sorted(key_entities, key=lambda x: (x.type, x.name))[:10]:
                    icon = icons.get(entity.type, "🗄️")
                    tier = entity.get_tier()
                    if tier == Tier.CRITICAL:
                        tier_badge = " `[CRITICAL]`"
                    elif tier == Tier.TRIVIAL:
                        tier_badge = " `[TRIVIAL]`"
                    else:
                        tier_badge = ""
                    raw_purpose = entity.tags.get('PURPOSE') or ''
                    # Truncate long purposes to keep the map compact.
                    purpose = raw_purpose[:60] + "..." if len(raw_purpose) > 60 else raw_purpose
                    f.write(f"{indent} - {icon} **{entity.name}** ({entity.type}){tier_badge}\n")
                    if purpose:
                        f.write(f"{indent} - {purpose}\n")

            # External relations, deduplicated and capped at 5.
            external_relations = [r for r in data['relations']
                                  if r['type'] in ('DEPENDS_ON', 'IMPLEMENTS', 'INHERITS')]
            if external_relations:
                unique_deps = {f"{r['type']} -> {r['target']}" for r in external_relations}
                f.write(f"\n{indent}**Dependencies:**\n\n")
                for rel_str in sorted(unique_deps)[:5]:
                    f.write(f"{indent} - 🔗 {rel_str}\n")
            f.write("\n")
        # [/DEF:_write_module_entry:Function]

        # [DEF:_write_mermaid_graph:Function]
        # @TIER: STANDARD
        # @PURPOSE: Writes the cross-module Mermaid dependency graph.
        def _write_mermaid_graph(f, sorted_modules):
            f.write("## Cross-Module Dependencies\n\n")
            f.write("```mermaid\n")
            f.write("graph TD\n")
            # FIX: the previous version emitted one edge per relation
            # occurrence, producing duplicate Mermaid edges; dedupe here.
            emitted = set()
            for module_path, data in sorted_modules:
                module_name = module_path.split('/')[-1] if module_path != 'root' else 'root'
                safe_name = module_name.replace('-', '_').replace('.', '_')
                for rel in data['relations']:
                    target = rel.get('target', '')
                    # Substring containment is heuristic ('api' also matches
                    # 'rapid'); TODO(review): anchor on path segments. Node
                    # ids use only the last path segment, so distinct modules
                    # with the same basename collapse into one node.
                    for other_module in modules:
                        if other_module != module_path and other_module in target:
                            other_name = other_module.split('/')[-1]
                            safe_other = other_name.replace('-', '_').replace('.', '_')
                            edge = (safe_name, rel['type'], safe_other)
                            if edge not in emitted:
                                emitted.add(edge)
                                f.write(f" {safe_name}-->|{rel['type']}|{safe_other}\n")
                            break
            f.write("```\n")
        # [/DEF:_write_mermaid_graph:Function]

        with open(OUTPUT_MODULE_MAP_MD, 'w', encoding='utf-8') as f:
            f.write("# Module Map\n\n")
            f.write("> High-level module structure for AI Context. Generated automatically.\n\n")
            f.write(f"**Generated:** {datetime.datetime.now().isoformat()}\n\n")
            f.write("## Summary\n\n")
            f.write(f"- **Total Modules:** {len(modules)}\n")
            f.write(f"- **Total Entities:** {len(all_entities)}\n\n")
            f.write("## Module Hierarchy\n\n")
            # Sort for deterministic output.
            sorted_modules = sorted(modules.items(), key=lambda x: x[0])
            for module_path, data in sorted_modules:
                _write_module_entry(f, module_path, data)
            _write_mermaid_graph(f, sorted_modules)
        print(f"Generated {OUTPUT_MODULE_MAP_MD}")
# [/DEF:_generate_module_map:Function]
# [/DEF:SemanticMapGenerator:Class]