#!/usr/bin/env python3
"""
Design Document Validator and Dependency Graph Generator

Validates design_document.yml and generates:
1. dependency_graph.yml - Layered execution order
2. Context snapshots for each task
3. Tasks with full context
"""
|
|
|
|
import argparse
|
|
import json
|
|
import os
|
|
import sys
|
|
from collections import defaultdict
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional, Set, Tuple
|
|
|
|
# Try to import yaml
|
|
try:
|
|
import yaml
|
|
HAS_YAML = True
|
|
except ImportError:
|
|
HAS_YAML = False
|
|
print("Warning: PyYAML not installed. Using basic parser.", file=sys.stderr)
|
|
|
|
|
|
# ============================================================================
|
|
# YAML Helpers
|
|
# ============================================================================
|
|
|
|
def load_yaml(filepath: str) -> dict:
|
|
"""Load YAML file."""
|
|
if not os.path.exists(filepath):
|
|
return {}
|
|
with open(filepath, 'r') as f:
|
|
content = f.read()
|
|
if not content.strip():
|
|
return {}
|
|
|
|
if HAS_YAML:
|
|
return yaml.safe_load(content) or {}
|
|
|
|
# Enhanced fallback parser with nested structure support
|
|
return parse_yaml_fallback(content)
|
|
|
|
|
|
def parse_yaml_fallback(content: str) -> dict:
|
|
"""Parse YAML with nested dict/list support without PyYAML dependency."""
|
|
def parse_value(val: str):
|
|
"""Parse a YAML value string into Python type."""
|
|
val = val.strip()
|
|
if val in ('null', '~', ''):
|
|
return None
|
|
if val == 'true':
|
|
return True
|
|
if val == 'false':
|
|
return False
|
|
if val == '[]':
|
|
return []
|
|
if val == '{}':
|
|
return {}
|
|
if val.lstrip('-').isdigit():
|
|
return int(val)
|
|
try:
|
|
if '.' in val and val.replace('.', '').lstrip('-').isdigit():
|
|
return float(val)
|
|
except ValueError:
|
|
pass
|
|
if len(val) >= 2 and val[0] in ('"', "'") and val[-1] == val[0]:
|
|
return val[1:-1]
|
|
return val
|
|
|
|
lines = content.split('\n')
|
|
result = {}
|
|
stack = [(result, -1, 'dict')] # (container, indent, type)
|
|
|
|
i = 0
|
|
while i < len(lines):
|
|
line = lines[i]
|
|
stripped = line.strip()
|
|
|
|
# Skip empty lines and comments
|
|
if not stripped or stripped.startswith('#'):
|
|
i += 1
|
|
continue
|
|
|
|
indent = len(line) - len(line.lstrip())
|
|
|
|
# Pop stack to correct level
|
|
while len(stack) > 1 and stack[-1][1] >= indent:
|
|
stack.pop()
|
|
|
|
container, _, container_type = stack[-1]
|
|
|
|
# Handle list items
|
|
if stripped.startswith('- '):
|
|
item_content = stripped[2:].strip()
|
|
|
|
# Ensure parent is a list
|
|
if container_type == 'dict' and len(stack) > 1:
|
|
# Convert last dict entry to list
|
|
parent, parent_indent, _ = stack[-2] if len(stack) > 1 else (result, -1, 'dict')
|
|
if isinstance(parent, dict):
|
|
for key in reversed(list(parent.keys())):
|
|
if isinstance(parent[key], dict) and not parent[key]:
|
|
parent[key] = []
|
|
container = parent[key]
|
|
stack[-1] = (container, stack[-1][1], 'list')
|
|
break
|
|
|
|
if isinstance(container, list):
|
|
if ':' in item_content and not item_content.startswith('"'):
|
|
# List item is a dict
|
|
key, _, value = item_content.partition(':')
|
|
key = key.strip()
|
|
value = value.strip()
|
|
new_dict = {key: parse_value(value) if value else {}}
|
|
container.append(new_dict)
|
|
if not value:
|
|
stack.append((new_dict[key], indent + 2, 'dict'))
|
|
else:
|
|
stack.append((new_dict, indent, 'dict'))
|
|
else:
|
|
container.append(parse_value(item_content))
|
|
|
|
# Handle key: value pairs
|
|
elif ':' in stripped:
|
|
key, _, value = stripped.partition(':')
|
|
key = key.strip()
|
|
value = value.strip()
|
|
|
|
if isinstance(container, dict):
|
|
if value:
|
|
container[key] = parse_value(value)
|
|
else:
|
|
container[key] = {}
|
|
stack.append((container[key], indent, 'dict'))
|
|
|
|
i += 1
|
|
|
|
return result
|
|
|
|
|
|
def save_yaml(filepath: str, data: dict):
|
|
"""Save data to YAML file."""
|
|
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
|
|
|
if HAS_YAML:
|
|
with open(filepath, 'w') as f:
|
|
yaml.dump(data, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
|
|
else:
|
|
# Simple JSON fallback
|
|
with open(filepath, 'w') as f:
|
|
json.dump(data, f, indent=2)
|
|
|
|
|
|
# ============================================================================
|
|
# Validation Classes
|
|
# ============================================================================
|
|
|
|
class ValidationError:
|
|
"""Represents a validation error."""
|
|
def __init__(self, category: str, entity_id: str, message: str, severity: str = "error"):
|
|
self.category = category
|
|
self.entity_id = entity_id
|
|
self.message = message
|
|
self.severity = severity # error, warning
|
|
|
|
def __str__(self):
|
|
icon = "❌" if self.severity == "error" else "⚠️"
|
|
return f"{icon} [{self.category}] {self.entity_id}: {self.message}"
|
|
|
|
|
|
class DesignValidator:
|
|
"""Validates design document structure and relationships."""
|
|
|
|
def __init__(self, design_doc: dict):
|
|
self.design = design_doc
|
|
self.errors: List[ValidationError] = []
|
|
self.warnings: List[ValidationError] = []
|
|
|
|
# Collected entity IDs
|
|
self.model_ids: Set[str] = set()
|
|
self.api_ids: Set[str] = set()
|
|
self.page_ids: Set[str] = set()
|
|
self.component_ids: Set[str] = set()
|
|
self.all_ids: Set[str] = set()
|
|
|
|
def validate(self) -> bool:
|
|
"""Run all validations. Returns True if no errors."""
|
|
self._collect_ids()
|
|
self._validate_models()
|
|
self._validate_apis()
|
|
self._validate_pages()
|
|
self._validate_components()
|
|
self._validate_no_circular_deps()
|
|
|
|
return len(self.errors) == 0
|
|
|
|
def _collect_ids(self):
|
|
"""Collect all entity IDs."""
|
|
for model in self.design.get('data_models', []):
|
|
self.model_ids.add(model.get('id', ''))
|
|
for api in self.design.get('api_endpoints', []):
|
|
self.api_ids.add(api.get('id', ''))
|
|
for page in self.design.get('pages', []):
|
|
self.page_ids.add(page.get('id', ''))
|
|
for comp in self.design.get('components', []):
|
|
self.component_ids.add(comp.get('id', ''))
|
|
|
|
self.all_ids = self.model_ids | self.api_ids | self.page_ids | self.component_ids
|
|
|
|
def _validate_models(self):
|
|
"""Validate data models."""
|
|
for model in self.design.get('data_models', []):
|
|
model_id = model.get('id', 'unknown')
|
|
|
|
# Check required fields
|
|
if not model.get('id'):
|
|
self.errors.append(ValidationError('model', model_id, "Missing 'id' field"))
|
|
if not model.get('name'):
|
|
self.errors.append(ValidationError('model', model_id, "Missing 'name' field"))
|
|
if not model.get('fields'):
|
|
self.errors.append(ValidationError('model', model_id, "Missing 'fields' - model has no fields"))
|
|
|
|
# Check for primary key
|
|
fields = model.get('fields', [])
|
|
has_pk = any('primary_key' in f.get('constraints', []) for f in fields)
|
|
|
|
# Also accept composite primary key via unique index (common for junction tables)
|
|
if not has_pk:
|
|
indexes = model.get('indexes', [])
|
|
has_composite_pk = any(
|
|
idx.get('unique', False) and len(idx.get('fields', [])) >= 2
|
|
for idx in indexes
|
|
)
|
|
has_pk = has_composite_pk
|
|
|
|
if not has_pk:
|
|
self.errors.append(ValidationError('model', model_id, "No primary_key field defined"))
|
|
|
|
# Check relations reference existing models
|
|
for relation in model.get('relations', []):
|
|
target = relation.get('target', '')
|
|
if target and target not in self.model_ids:
|
|
self.errors.append(ValidationError(
|
|
'model', model_id,
|
|
f"Relation target '{target}' does not exist"
|
|
))
|
|
|
|
# Check enum fields have values (accept both 'enum_values' and 'values')
|
|
for field in fields:
|
|
if field.get('type') == 'enum':
|
|
has_values = field.get('enum_values') or field.get('values')
|
|
if not has_values:
|
|
self.errors.append(ValidationError(
|
|
'model', model_id,
|
|
f"Enum field '{field.get('name')}' missing enum_values or values"
|
|
))
|
|
|
|
def _validate_apis(self):
|
|
"""Validate API endpoints."""
|
|
for api in self.design.get('api_endpoints', []):
|
|
api_id = api.get('id', 'unknown')
|
|
|
|
# Check required fields
|
|
if not api.get('id'):
|
|
self.errors.append(ValidationError('api', api_id, "Missing 'id' field"))
|
|
if not api.get('method'):
|
|
self.errors.append(ValidationError('api', api_id, "Missing 'method' field"))
|
|
if not api.get('path'):
|
|
self.errors.append(ValidationError('api', api_id, "Missing 'path' field"))
|
|
|
|
# POST/PUT/PATCH should have request_body (null is explicitly no body needed)
|
|
method = api.get('method', '').upper()
|
|
has_body = 'request_body' in api # Check if key exists (even if null)
|
|
if method in ['POST', 'PUT', 'PATCH'] and not has_body:
|
|
self.warnings.append(ValidationError(
|
|
'api', api_id,
|
|
f"{method} endpoint should have request_body",
|
|
severity="warning"
|
|
))
|
|
|
|
# Check at least one response defined
|
|
if not api.get('responses'):
|
|
self.errors.append(ValidationError('api', api_id, "No responses defined"))
|
|
|
|
# Check model dependencies exist
|
|
for model_id in api.get('depends_on_models', []):
|
|
if model_id not in self.model_ids:
|
|
self.errors.append(ValidationError(
|
|
'api', api_id,
|
|
f"depends_on_models references non-existent model '{model_id}'"
|
|
))
|
|
|
|
# Check API dependencies exist
|
|
for dep_api_id in api.get('depends_on_apis', []):
|
|
if dep_api_id not in self.api_ids:
|
|
self.errors.append(ValidationError(
|
|
'api', api_id,
|
|
f"depends_on_apis references non-existent API '{dep_api_id}'"
|
|
))
|
|
|
|
def _validate_pages(self):
|
|
"""Validate pages."""
|
|
for page in self.design.get('pages', []):
|
|
page_id = page.get('id', 'unknown')
|
|
|
|
# Check required fields
|
|
if not page.get('id'):
|
|
self.errors.append(ValidationError('page', page_id, "Missing 'id' field"))
|
|
if not page.get('path'):
|
|
self.errors.append(ValidationError('page', page_id, "Missing 'path' field"))
|
|
|
|
# Check data_needs reference existing APIs
|
|
for data_need in page.get('data_needs', []):
|
|
api_id = data_need.get('api_id', '')
|
|
if api_id and api_id not in self.api_ids:
|
|
self.errors.append(ValidationError(
|
|
'page', page_id,
|
|
f"data_needs references non-existent API '{api_id}'"
|
|
))
|
|
|
|
# Check components exist
|
|
for comp_id in page.get('components', []):
|
|
if comp_id not in self.component_ids:
|
|
self.errors.append(ValidationError(
|
|
'page', page_id,
|
|
f"References non-existent component '{comp_id}'"
|
|
))
|
|
|
|
def _validate_components(self):
|
|
"""Validate components."""
|
|
for comp in self.design.get('components', []):
|
|
comp_id = comp.get('id', 'unknown')
|
|
|
|
# Check required fields
|
|
if not comp.get('id'):
|
|
self.errors.append(ValidationError('component', comp_id, "Missing 'id' field"))
|
|
if not comp.get('name'):
|
|
self.errors.append(ValidationError('component', comp_id, "Missing 'name' field"))
|
|
|
|
# Check uses_apis reference existing APIs
|
|
for api_id in comp.get('uses_apis', []):
|
|
if api_id not in self.api_ids:
|
|
self.errors.append(ValidationError(
|
|
'component', comp_id,
|
|
f"uses_apis references non-existent API '{api_id}'"
|
|
))
|
|
|
|
# Check uses_components reference existing components
|
|
for child_id in comp.get('uses_components', []):
|
|
if child_id not in self.component_ids:
|
|
self.errors.append(ValidationError(
|
|
'component', comp_id,
|
|
f"uses_components references non-existent component '{child_id}'"
|
|
))
|
|
|
|
def _validate_no_circular_deps(self):
|
|
"""Check for circular dependencies."""
|
|
# Build dependency graph
|
|
deps: Dict[str, Set[str]] = defaultdict(set)
|
|
|
|
# Model relations
|
|
for model in self.design.get('data_models', []):
|
|
model_id = model.get('id', '')
|
|
for relation in model.get('relations', []):
|
|
target = relation.get('target', '')
|
|
if target:
|
|
deps[model_id].add(target)
|
|
|
|
# API dependencies
|
|
for api in self.design.get('api_endpoints', []):
|
|
api_id = api.get('id', '')
|
|
for model_id in api.get('depends_on_models', []):
|
|
deps[api_id].add(model_id)
|
|
for dep_api_id in api.get('depends_on_apis', []):
|
|
deps[api_id].add(dep_api_id)
|
|
|
|
# Page dependencies
|
|
for page in self.design.get('pages', []):
|
|
page_id = page.get('id', '')
|
|
for data_need in page.get('data_needs', []):
|
|
api_id = data_need.get('api_id', '')
|
|
if api_id:
|
|
deps[page_id].add(api_id)
|
|
for comp_id in page.get('components', []):
|
|
deps[page_id].add(comp_id)
|
|
|
|
# Component dependencies
|
|
for comp in self.design.get('components', []):
|
|
comp_id = comp.get('id', '')
|
|
for api_id in comp.get('uses_apis', []):
|
|
deps[comp_id].add(api_id)
|
|
for child_id in comp.get('uses_components', []):
|
|
deps[comp_id].add(child_id)
|
|
|
|
# Detect cycles using DFS
|
|
visited = set()
|
|
rec_stack = set()
|
|
|
|
def has_cycle(node: str, path: List[str]) -> Optional[List[str]]:
|
|
visited.add(node)
|
|
rec_stack.add(node)
|
|
path.append(node)
|
|
|
|
for neighbor in deps.get(node, []):
|
|
if neighbor not in visited:
|
|
result = has_cycle(neighbor, path)
|
|
if result:
|
|
return result
|
|
elif neighbor in rec_stack:
|
|
# Found cycle
|
|
cycle_start = path.index(neighbor)
|
|
return path[cycle_start:] + [neighbor]
|
|
|
|
path.pop()
|
|
rec_stack.remove(node)
|
|
return None
|
|
|
|
for entity_id in self.all_ids:
|
|
if entity_id not in visited:
|
|
cycle = has_cycle(entity_id, [])
|
|
if cycle:
|
|
self.errors.append(ValidationError(
|
|
'dependency', entity_id,
|
|
f"Circular dependency detected: {' → '.join(cycle)}"
|
|
))
|
|
|
|
def print_report(self):
|
|
"""Print validation report."""
|
|
print()
|
|
print("=" * 60)
|
|
print("DESIGN VALIDATION REPORT".center(60))
|
|
print("=" * 60)
|
|
|
|
# Summary
|
|
print()
|
|
print(f" Models: {len(self.model_ids)}")
|
|
print(f" APIs: {len(self.api_ids)}")
|
|
print(f" Pages: {len(self.page_ids)}")
|
|
print(f" Components: {len(self.component_ids)}")
|
|
print(f" Total: {len(self.all_ids)}")
|
|
|
|
# Errors
|
|
if self.errors:
|
|
print()
|
|
print("-" * 60)
|
|
print(f"ERRORS ({len(self.errors)})")
|
|
print("-" * 60)
|
|
for error in self.errors:
|
|
print(f" {error}")
|
|
|
|
# Warnings
|
|
if self.warnings:
|
|
print()
|
|
print("-" * 60)
|
|
print(f"WARNINGS ({len(self.warnings)})")
|
|
print("-" * 60)
|
|
for warning in self.warnings:
|
|
print(f" {warning}")
|
|
|
|
# Result
|
|
print()
|
|
print("=" * 60)
|
|
if self.errors:
|
|
print("❌ VALIDATION FAILED".center(60))
|
|
else:
|
|
print("✅ VALIDATION PASSED".center(60))
|
|
print("=" * 60)
|
|
|
|
|
|
# ============================================================================
|
|
# Dependency Graph Generator
|
|
# ============================================================================
|
|
|
|
class DependencyGraphGenerator:
|
|
"""Generates dependency graph and execution layers from design document."""
|
|
|
|
def __init__(self, design_doc: dict):
|
|
self.design = design_doc
|
|
self.deps: Dict[str, Set[str]] = defaultdict(set)
|
|
self.reverse_deps: Dict[str, Set[str]] = defaultdict(set)
|
|
self.entity_types: Dict[str, str] = {}
|
|
self.entity_names: Dict[str, str] = {}
|
|
self.layers: List[List[str]] = []
|
|
|
|
def generate(self) -> dict:
|
|
"""Generate the full dependency graph."""
|
|
self._build_dependency_map()
|
|
self._calculate_layers()
|
|
return self._build_graph_document()
|
|
|
|
def _build_dependency_map(self):
|
|
"""Build forward and reverse dependency maps."""
|
|
# Models
|
|
for model in self.design.get('data_models', []):
|
|
model_id = model.get('id', '')
|
|
self.entity_types[model_id] = 'model'
|
|
self.entity_names[model_id] = model.get('name', model_id)
|
|
|
|
for relation in model.get('relations', []):
|
|
target = relation.get('target', '')
|
|
if target:
|
|
self.deps[model_id].add(target)
|
|
self.reverse_deps[target].add(model_id)
|
|
|
|
# APIs
|
|
for api in self.design.get('api_endpoints', []):
|
|
api_id = api.get('id', '')
|
|
self.entity_types[api_id] = 'api'
|
|
self.entity_names[api_id] = api.get('summary', api_id)
|
|
|
|
for model_id in api.get('depends_on_models', []):
|
|
self.deps[api_id].add(model_id)
|
|
self.reverse_deps[model_id].add(api_id)
|
|
|
|
for dep_api_id in api.get('depends_on_apis', []):
|
|
self.deps[api_id].add(dep_api_id)
|
|
self.reverse_deps[dep_api_id].add(api_id)
|
|
|
|
# Pages
|
|
for page in self.design.get('pages', []):
|
|
page_id = page.get('id', '')
|
|
self.entity_types[page_id] = 'page'
|
|
self.entity_names[page_id] = page.get('name', page_id)
|
|
|
|
for data_need in page.get('data_needs', []):
|
|
api_id = data_need.get('api_id', '')
|
|
if api_id:
|
|
self.deps[page_id].add(api_id)
|
|
self.reverse_deps[api_id].add(page_id)
|
|
|
|
for comp_id in page.get('components', []):
|
|
self.deps[page_id].add(comp_id)
|
|
self.reverse_deps[comp_id].add(page_id)
|
|
|
|
# Components
|
|
for comp in self.design.get('components', []):
|
|
comp_id = comp.get('id', '')
|
|
self.entity_types[comp_id] = 'component'
|
|
self.entity_names[comp_id] = comp.get('name', comp_id)
|
|
|
|
for api_id in comp.get('uses_apis', []):
|
|
self.deps[comp_id].add(api_id)
|
|
self.reverse_deps[api_id].add(comp_id)
|
|
|
|
for child_id in comp.get('uses_components', []):
|
|
self.deps[comp_id].add(child_id)
|
|
self.reverse_deps[child_id].add(comp_id)
|
|
|
|
def _calculate_layers(self):
|
|
"""Calculate execution layers using topological sort."""
|
|
# Find all entities with no dependencies (Layer 1)
|
|
all_entities = set(self.entity_types.keys())
|
|
remaining = all_entities.copy()
|
|
assigned = set()
|
|
|
|
while remaining:
|
|
# Find entities whose dependencies are all assigned
|
|
layer = []
|
|
for entity_id in remaining:
|
|
deps = self.deps.get(entity_id, set())
|
|
if deps.issubset(assigned):
|
|
layer.append(entity_id)
|
|
|
|
if not layer:
|
|
# Shouldn't happen if no circular deps, but safety check
|
|
print(f"Warning: Could not assign remaining entities: {remaining}", file=sys.stderr)
|
|
break
|
|
|
|
self.layers.append(sorted(layer))
|
|
for entity_id in layer:
|
|
remaining.remove(entity_id)
|
|
assigned.add(entity_id)
|
|
|
|
def _build_graph_document(self) -> dict:
|
|
"""Build the dependency graph document."""
|
|
# Calculate stats
|
|
max_parallelism = max(len(layer) for layer in self.layers) if self.layers else 0
|
|
critical_path = len(self.layers)
|
|
|
|
graph = {
|
|
'dependency_graph': {
|
|
'design_version': self.design.get('revision', 1),
|
|
'workflow_version': self.design.get('workflow_version', 'v001'),
|
|
'generated_at': datetime.now().isoformat(),
|
|
'generator': 'validate_design.py',
|
|
'stats': {
|
|
'total_entities': len(self.entity_types),
|
|
'total_layers': len(self.layers),
|
|
'max_parallelism': max_parallelism,
|
|
'critical_path_length': critical_path
|
|
}
|
|
},
|
|
'layers': [],
|
|
'dependency_map': {},
|
|
'task_map': []
|
|
}
|
|
|
|
# Build layers
|
|
layer_names = {
|
|
1: ("Data Layer", "Database models - no external dependencies"),
|
|
2: ("API Layer", "REST endpoints - depend on models"),
|
|
3: ("UI Layer", "Pages and components - depend on APIs"),
|
|
}
|
|
|
|
for i, layer_entities in enumerate(self.layers, 1):
|
|
name, desc = layer_names.get(i, (f"Layer {i}", f"Entities with {i-1} levels of dependencies"))
|
|
|
|
layer_items = []
|
|
for entity_id in layer_entities:
|
|
entity_type = self.entity_types.get(entity_id, 'unknown')
|
|
agent = 'backend' if entity_type in ['model', 'api'] else 'frontend'
|
|
|
|
layer_items.append({
|
|
'id': entity_id,
|
|
'type': entity_type,
|
|
'name': self.entity_names.get(entity_id, entity_id),
|
|
'depends_on': list(self.deps.get(entity_id, [])),
|
|
'task_id': f"task_create_{entity_id}",
|
|
'agent': agent,
|
|
'complexity': 'medium' # Could be calculated
|
|
})
|
|
|
|
graph['layers'].append({
|
|
'layer': i,
|
|
'name': name,
|
|
'description': desc,
|
|
'items': layer_items,
|
|
'requires_layers': list(range(1, i)) if i > 1 else [],
|
|
'parallel_count': len(layer_items)
|
|
})
|
|
|
|
# Build dependency map
|
|
for entity_id in self.entity_types:
|
|
graph['dependency_map'][entity_id] = {
|
|
'type': self.entity_types.get(entity_id),
|
|
'layer': self._get_layer_number(entity_id),
|
|
'depends_on': list(self.deps.get(entity_id, [])),
|
|
'depended_by': list(self.reverse_deps.get(entity_id, []))
|
|
}
|
|
|
|
return graph
|
|
|
|
def _get_layer_number(self, entity_id: str) -> int:
|
|
"""Get the layer number for an entity."""
|
|
for i, layer in enumerate(self.layers, 1):
|
|
if entity_id in layer:
|
|
return i
|
|
return 0
|
|
|
|
def print_layers(self):
|
|
"""Print layer visualization."""
|
|
print()
|
|
print("=" * 60)
|
|
print("EXECUTION LAYERS".center(60))
|
|
print("=" * 60)
|
|
|
|
for i, layer_entities in enumerate(self.layers, 1):
|
|
print()
|
|
print(f"Layer {i}: ({len(layer_entities)} items - parallel)")
|
|
print("-" * 40)
|
|
|
|
for entity_id in layer_entities:
|
|
entity_type = self.entity_types.get(entity_id, '?')
|
|
icon = {'model': '📦', 'api': '🔌', 'page': '📄', 'component': '🧩'}.get(entity_type, '❓')
|
|
deps = self.deps.get(entity_id, set())
|
|
deps_str = f" ← [{', '.join(deps)}]" if deps else ""
|
|
print(f" {icon} {entity_id}{deps_str}")
|
|
|
|
print()
|
|
print("=" * 60)
|
|
|
|
|
|
# ============================================================================
|
|
# Context Generator
|
|
# ============================================================================
|
|
|
|
class ContextGenerator:
|
|
"""Generates context snapshots for tasks."""
|
|
|
|
def __init__(self, design_doc: dict, graph: dict, output_dir: str):
|
|
self.design = design_doc
|
|
self.graph = graph
|
|
self.output_dir = output_dir
|
|
|
|
# Index design entities by ID for quick lookup
|
|
self.models: Dict[str, dict] = {}
|
|
self.apis: Dict[str, dict] = {}
|
|
self.pages: Dict[str, dict] = {}
|
|
self.components: Dict[str, dict] = {}
|
|
|
|
self._index_entities()
|
|
|
|
def _index_entities(self):
|
|
"""Index all entities by ID."""
|
|
for model in self.design.get('data_models', []):
|
|
self.models[model.get('id', '')] = model
|
|
for api in self.design.get('api_endpoints', []):
|
|
self.apis[api.get('id', '')] = api
|
|
for page in self.design.get('pages', []):
|
|
self.pages[page.get('id', '')] = page
|
|
for comp in self.design.get('components', []):
|
|
self.components[comp.get('id', '')] = comp
|
|
|
|
def generate_all_contexts(self):
|
|
"""Generate context files for all entities."""
|
|
contexts_dir = Path(self.output_dir) / 'contexts'
|
|
contexts_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
for entity_id, entity_info in self.graph.get('dependency_map', {}).items():
|
|
context = self._generate_context(entity_id, entity_info)
|
|
context_path = contexts_dir / f"{entity_id}.yml"
|
|
save_yaml(str(context_path), context)
|
|
|
|
print(f"Generated {len(self.graph.get('dependency_map', {}))} context files in {contexts_dir}")
|
|
|
|
def _generate_context(self, entity_id: str, entity_info: dict) -> dict:
|
|
"""Generate context for a single entity."""
|
|
entity_type = entity_info.get('type', '')
|
|
deps = entity_info.get('depends_on', [])
|
|
|
|
context = {
|
|
'task_id': f"task_create_{entity_id}",
|
|
'entity_id': entity_id,
|
|
'generated_at': datetime.now().isoformat(),
|
|
'workflow_version': self.graph.get('dependency_graph', {}).get('workflow_version', 'v001'),
|
|
'target': {
|
|
'type': entity_type,
|
|
'definition': self._get_entity_definition(entity_id, entity_type)
|
|
},
|
|
'related': {
|
|
'models': [],
|
|
'apis': [],
|
|
'components': []
|
|
},
|
|
'dependencies': {
|
|
'entity_ids': deps,
|
|
'definitions': []
|
|
},
|
|
'files': {
|
|
'to_create': self._get_files_to_create(entity_id, entity_type),
|
|
'reference': []
|
|
},
|
|
'acceptance': self._get_acceptance_criteria(entity_id, entity_type)
|
|
}
|
|
|
|
# Add related entity definitions
|
|
for dep_id in deps:
|
|
dep_info = self.graph.get('dependency_map', {}).get(dep_id, {})
|
|
dep_type = dep_info.get('type', '')
|
|
dep_def = self._get_entity_definition(dep_id, dep_type)
|
|
|
|
if dep_type == 'model':
|
|
context['related']['models'].append({'id': dep_id, 'definition': dep_def})
|
|
elif dep_type == 'api':
|
|
context['related']['apis'].append({'id': dep_id, 'definition': dep_def})
|
|
elif dep_type == 'component':
|
|
context['related']['components'].append({'id': dep_id, 'definition': dep_def})
|
|
|
|
context['dependencies']['definitions'].append({
|
|
'id': dep_id,
|
|
'type': dep_type,
|
|
'definition': dep_def
|
|
})
|
|
|
|
return context
|
|
|
|
def _get_entity_definition(self, entity_id: str, entity_type: str) -> dict:
|
|
"""Get the full definition for an entity."""
|
|
if entity_type == 'model':
|
|
return self.models.get(entity_id, {})
|
|
elif entity_type == 'api':
|
|
return self.apis.get(entity_id, {})
|
|
elif entity_type == 'page':
|
|
return self.pages.get(entity_id, {})
|
|
elif entity_type == 'component':
|
|
return self.components.get(entity_id, {})
|
|
return {}
|
|
|
|
def _get_files_to_create(self, entity_id: str, entity_type: str) -> List[str]:
|
|
"""Get list of files to create for an entity."""
|
|
if entity_type == 'model':
|
|
name = self.models.get(entity_id, {}).get('name', entity_id)
|
|
return [
|
|
'prisma/schema.prisma',
|
|
f'app/models/{name.lower()}.ts'
|
|
]
|
|
elif entity_type == 'api':
|
|
path = self.apis.get(entity_id, {}).get('path', '/api/unknown')
|
|
route_path = path.replace('/api/', '').replace(':', '')
|
|
return [f'app/api/{route_path}/route.ts']
|
|
elif entity_type == 'page':
|
|
path = self.pages.get(entity_id, {}).get('path', '/unknown')
|
|
return [f'app{path}/page.tsx']
|
|
elif entity_type == 'component':
|
|
name = self.components.get(entity_id, {}).get('name', 'Unknown')
|
|
return [f'app/components/{name}.tsx']
|
|
return []
|
|
|
|
def _get_acceptance_criteria(self, entity_id: str, entity_type: str) -> List[dict]:
|
|
"""Get acceptance criteria for an entity."""
|
|
criteria = []
|
|
|
|
if entity_type == 'model':
|
|
criteria = [
|
|
{'criterion': 'Model defined in Prisma schema', 'verification': 'Check prisma/schema.prisma'},
|
|
{'criterion': 'TypeScript types exported', 'verification': 'Import type in test file'},
|
|
{'criterion': 'Relations properly configured', 'verification': 'Check Prisma relations'},
|
|
]
|
|
elif entity_type == 'api':
|
|
api = self.apis.get(entity_id, {})
|
|
method = api.get('method', 'GET')
|
|
path = api.get('path', '/api/unknown')
|
|
criteria = [
|
|
{'criterion': f'{method} {path} returns success response', 'verification': f'curl -X {method} {path}'},
|
|
{'criterion': 'Request validation implemented', 'verification': 'Test with invalid data'},
|
|
{'criterion': 'Error responses match contract', 'verification': 'Test error scenarios'},
|
|
]
|
|
elif entity_type == 'page':
|
|
page = self.pages.get(entity_id, {})
|
|
path = page.get('path', '/unknown')
|
|
criteria = [
|
|
{'criterion': f'Page renders at {path}', 'verification': f'Navigate to {path}'},
|
|
{'criterion': 'Data fetching works', 'verification': 'Check network tab'},
|
|
{'criterion': 'Components render correctly', 'verification': 'Visual inspection'},
|
|
]
|
|
elif entity_type == 'component':
|
|
criteria = [
|
|
{'criterion': 'Component renders without errors', 'verification': 'Import and render in test'},
|
|
{'criterion': 'Props are typed correctly', 'verification': 'TypeScript compilation'},
|
|
{'criterion': 'Events fire correctly', 'verification': 'Test event handlers'},
|
|
]
|
|
|
|
return criteria
|
|
|
|
|
|
# ============================================================================
|
|
# Task Generator
|
|
# ============================================================================
|
|
|
|
class TaskGenerator:
|
|
"""Generates task files with full context."""
|
|
|
|
def __init__(self, design_doc: dict, graph: dict, output_dir: str):
|
|
self.design = design_doc
|
|
self.graph = graph
|
|
self.output_dir = output_dir
|
|
|
|
def generate_all_tasks(self):
|
|
"""Generate task files for all entities."""
|
|
tasks_dir = Path(self.output_dir) / 'tasks'
|
|
tasks_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
task_count = 0
|
|
for layer in self.graph.get('layers', []):
|
|
for item in layer.get('items', []):
|
|
task = self._generate_task(item, layer.get('layer', 1))
|
|
task_path = tasks_dir / f"{task['id']}.yml"
|
|
save_yaml(str(task_path), task)
|
|
task_count += 1
|
|
|
|
print(f"Generated {task_count} task files in {tasks_dir}")
|
|
|
|
def _generate_task(self, item: dict, layer_num: int) -> dict:
|
|
"""Generate a task for an entity."""
|
|
entity_id = item.get('id', '')
|
|
entity_type = item.get('type', '')
|
|
|
|
task = {
|
|
'id': item.get('task_id', f'task_create_{entity_id}'),
|
|
'type': 'create',
|
|
'title': f"Create {item.get('name', entity_id)}",
|
|
'agent': item.get('agent', 'backend'),
|
|
'entity_id': entity_id,
|
|
'entity_ids': [entity_id],
|
|
'status': 'pending',
|
|
'layer': layer_num,
|
|
'parallel_group': f"layer_{layer_num}",
|
|
'complexity': item.get('complexity', 'medium'),
|
|
'dependencies': [f"task_create_{dep}" for dep in item.get('depends_on', [])],
|
|
'context': {
|
|
'design_version': self.graph.get('dependency_graph', {}).get('design_version', 1),
|
|
'workflow_version': self.graph.get('dependency_graph', {}).get('workflow_version', 'v001'),
|
|
'context_snapshot_path': f".workflow/versions/v001/contexts/{entity_id}.yml"
|
|
},
|
|
'created_at': datetime.now().isoformat()
|
|
}
|
|
|
|
return task
|
|
|
|
|
|
# ============================================================================
|
|
# Main CLI
|
|
# ============================================================================
|
|
|
|
def main():
|
|
parser = argparse.ArgumentParser(description="Validate design document and generate dependency graph")
|
|
parser.add_argument('design_file', help='Path to design_document.yml')
|
|
parser.add_argument('--output-dir', '-o', default='.workflow/versions/v001',
|
|
help='Output directory for generated files')
|
|
parser.add_argument('--validate-only', '-v', action='store_true',
|
|
help='Only validate, do not generate files')
|
|
parser.add_argument('--quiet', '-q', action='store_true',
|
|
help='Suppress output except errors')
|
|
parser.add_argument('--json', action='store_true',
|
|
help='Output validation result as JSON')
|
|
|
|
args = parser.parse_args()
|
|
|
|
# Load design document
|
|
design = load_yaml(args.design_file)
|
|
if not design:
|
|
print(f"Error: Could not load design document: {args.design_file}", file=sys.stderr)
|
|
sys.exit(1)
|
|
|
|
# Validate
|
|
validator = DesignValidator(design)
|
|
is_valid = validator.validate()
|
|
|
|
if args.json:
|
|
result = {
|
|
'valid': is_valid,
|
|
'errors': [str(e) for e in validator.errors],
|
|
'warnings': [str(w) for w in validator.warnings],
|
|
'stats': {
|
|
'models': len(validator.model_ids),
|
|
'apis': len(validator.api_ids),
|
|
'pages': len(validator.page_ids),
|
|
'components': len(validator.component_ids)
|
|
}
|
|
}
|
|
print(json.dumps(result, indent=2))
|
|
sys.exit(0 if is_valid else 1)
|
|
|
|
if not args.quiet:
|
|
validator.print_report()
|
|
|
|
if not is_valid:
|
|
sys.exit(1)
|
|
|
|
if args.validate_only:
|
|
sys.exit(0)
|
|
|
|
# Generate dependency graph
|
|
generator = DependencyGraphGenerator(design)
|
|
graph = generator.generate()
|
|
|
|
if not args.quiet:
|
|
generator.print_layers()
|
|
|
|
# Save dependency graph
|
|
output_dir = Path(args.output_dir)
|
|
output_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
graph_path = output_dir / 'dependency_graph.yml'
|
|
save_yaml(str(graph_path), graph)
|
|
print(f"Saved dependency graph to: {graph_path}")
|
|
|
|
# Generate context files
|
|
context_gen = ContextGenerator(design, graph, str(output_dir))
|
|
context_gen.generate_all_contexts()
|
|
|
|
# Generate task files
|
|
task_gen = TaskGenerator(design, graph, str(output_dir))
|
|
task_gen.generate_all_tasks()
|
|
|
|
print()
|
|
print("✅ Design validation and generation complete!")
|
|
print(f" Output directory: {output_dir}")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|