#!/usr/bin/env python3
"""
Implementation Validator - Checks generated code against design specs.

Validates:
1. Component props match design document
2. API endpoints match design specs
3. Model fields match design specs
4. Required files exist
"""
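
# The validators below assume a design document roughly shaped like this
# (illustrative sketch only; the ids, names, and values are hypothetical, and
# the keys shown are the ones read by _validate_components, _validate_apis,
# and _validate_models):
#
#   components:
#     - id: comp-login-form
#       name: LoginForm
#       props:
#         - {name: onSubmit, type: "(values: unknown) => void", required: true}
#       events:
#         - {name: onCancel}
#   api_endpoints:
#     - {id: api-get-user, method: GET, path: /api/users/:id}
#   data_models:
#     - id: model-user
#       name: User
#       fields:
#         - {name: email}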

import argparse
import datetime
import os
import re
import sys
from pathlib import Path
from typing import Dict, List

# Try to import yaml
try:
    import yaml
    HAS_YAML = True
except ImportError:
    HAS_YAML = False
    print("Warning: PyYAML not installed; design files will be treated as empty.", file=sys.stderr)


def load_yaml(filepath: str) -> dict:
    """Load YAML file."""
    if not os.path.exists(filepath):
        return {}
    with open(filepath, 'r') as f:
        content = f.read()
    if not content.strip():
        return {}
    if HAS_YAML:
        return yaml.safe_load(content) or {}
    return {}


class ImplementationValidator:
    """Validates implementation against design specs."""

    def __init__(self, design_path: str, project_root: str = "."):
        self.design = load_yaml(design_path)
        self.project_root = Path(project_root)
        self.errors: List[Dict] = []
        self.warnings: List[Dict] = []
        self.passed: List[Dict] = []

    def validate_all(self) -> bool:
        """Run all validations."""
        self._validate_components()
        self._validate_apis()
        self._validate_models()
        return len(self.errors) == 0

    def _validate_components(self):
        """Validate React components against design specs."""
        components = self.design.get('components', [])

        for comp in components:
            comp_id = comp.get('id', 'unknown')
            comp_name = comp.get('name', 'Unknown')

            # Find component file
            possible_paths = [
                f"components/{comp_name}.tsx",
                f"app/components/{comp_name}.tsx",
                f"src/components/{comp_name}.tsx",
                f"components/{comp_name}/index.tsx",
            ]

            comp_file = None
            for path in possible_paths:
                full_path = self.project_root / path
                if full_path.exists():
                    comp_file = full_path
                    break

            if not comp_file:
                self.errors.append({
                    'type': 'component',
                    'id': comp_id,
                    'message': f"Component file not found for {comp_name}",
                    'expected': possible_paths[0]
                })
                continue

            # Read component source
            content = comp_file.read_text()

            # Check props
            design_props = comp.get('props', [])
            self._validate_component_props(comp_id, comp_name, content, design_props)

            # Check events
            design_events = comp.get('events', [])
            self._validate_component_events(comp_id, comp_name, content, design_events)

    def _validate_component_props(self, comp_id: str, comp_name: str, content: str, design_props: List[Dict]):
        """Validate component props match design."""

        # Extract interface from content
        interface_match = re.search(
            rf'(?:interface|type)\s+{comp_name}Props\s*[=]?\s*\{{([^}}]+)\}}',
            content,
            re.DOTALL
        )

        if not interface_match and design_props:
            self.errors.append({
                'type': 'component',
                'id': comp_id,
                'message': f"No Props interface found for {comp_name}",
                'expected': f"interface {comp_name}Props {{ ... }}"
            })
            return

        if interface_match:
            interface_content = interface_match.group(1)

            for prop in design_props:
                prop_name = prop.get('name', '')
                prop_type = prop.get('type', 'any')
                required = prop.get('required', False)

                # Check if prop exists in interface
                prop_pattern = rf'\b{prop_name}\s*[?]?\s*:'
                if not re.search(prop_pattern, interface_content):
                    self.errors.append({
                        'type': 'component',
                        'id': comp_id,
                        'message': f"Missing prop '{prop_name}' in {comp_name}",
                        'expected': f"{prop_name}: {prop_type}",
                        'design_spec': prop
                    })
                else:
                    self.passed.append({
                        'type': 'component',
                        'id': comp_id,
                        'message': f"Prop '{prop_name}' found in {comp_name}"
                    })

    def _validate_component_events(self, comp_id: str, comp_name: str, content: str, design_events: List[Dict]):
        """Validate component events match design."""

        for event in design_events:
            event_name = event.get('name', '')

            # Check if event handler exists in props or component
            if not re.search(rf'\b{event_name}\b', content):
                self.warnings.append({
                    'type': 'component',
                    'id': comp_id,
                    'message': f"Event '{event_name}' not found in {comp_name}",
                    'expected': f"{event_name}?: (payload) => void",
                    'design_spec': event
                })
            else:
                self.passed.append({
                    'type': 'component',
                    'id': comp_id,
                    'message': f"Event '{event_name}' found in {comp_name}"
                })

    def _validate_apis(self):
        """Validate API routes against design specs."""
        apis = self.design.get('api_endpoints', [])

        for api in apis:
            api_id = api.get('id', 'unknown')
            method = api.get('method', 'GET')
            path = api.get('path', '')

            # Convert API path to Next.js route file path
            route_path = self._api_path_to_route(path)

            possible_paths = [
                f"app/api{route_path}/route.ts",
                f"pages/api{route_path}.ts",
                f"src/app/api{route_path}/route.ts",
            ]

            route_file = None
            for p in possible_paths:
                full_path = self.project_root / p
                if full_path.exists():
                    route_file = full_path
                    break

            if not route_file:
                self.warnings.append({
                    'type': 'api',
                    'id': api_id,
                    'message': f"API route not found for {method} {path}",
                    'expected': possible_paths[0]
                })
                continue

            # Check if method handler exists
            content = route_file.read_text()
            method_upper = method.upper()

            if not re.search(rf'export\s+(?:async\s+)?function\s+{method_upper}\b', content):
                self.errors.append({
                    'type': 'api',
                    'id': api_id,
                    'message': f"Method handler {method_upper} not found in route",
                    'file': str(route_file)
                })
            else:
                self.passed.append({
                    'type': 'api',
                    'id': api_id,
                    'message': f"API {method_upper} {path} implemented"
                })

    def _api_path_to_route(self, path: str) -> str:
        """Convert /api/users/:id to /users/[id]"""
        # Strip only the leading /api prefix (str.replace would also mangle
        # later occurrences, e.g. /api/apikeys)
        route = path[len('/api'):] if path.startswith('/api') else path
        # Convert :param to [param]
        route = re.sub(r':(\w+)', r'[\1]', route)
        return route

    def _validate_models(self):
        """Validate database models against design specs."""
        models = self.design.get('data_models', [])

        for model in models:
            model_id = model.get('id', 'unknown')
            model_name = model.get('name', 'Unknown')
            table_name = model.get('table_name', '')

            # Check Prisma schema
            schema_path = self.project_root / 'prisma' / 'schema.prisma'

            if not schema_path.exists():
                self.warnings.append({
                    'type': 'model',
                    'id': model_id,
                    'message': "Prisma schema not found",
                    'expected': 'prisma/schema.prisma'
                })
                continue

            content = schema_path.read_text()

            # Check model exists
            if not re.search(rf'model\s+{model_name}\s*\{{', content):
                self.errors.append({
                    'type': 'model',
                    'id': model_id,
                    'message': f"Model {model_name} not found in Prisma schema",
                    'expected': f"model {model_name} {{ ... }}"
                })
                continue

            # Check required fields
            fields = model.get('fields', [])
            model_match = re.search(
                rf'model\s+{model_name}\s*\{{([^}}]+)\}}',
                content,
                re.DOTALL
            )

            if model_match:
                model_content = model_match.group(1)
                for field in fields:
                    field_name = field.get('name', '')
                    if field_name and not re.search(rf'\b{field_name}\b', model_content):
                        self.warnings.append({
                            'type': 'model',
                            'id': model_id,
                            'message': f"Field '{field_name}' not found in {model_name}",
                            'design_spec': field
                        })

    def print_report(self):
        """Print validation report."""
        print("\n" + "=" * 60)
        print(" IMPLEMENTATION VALIDATION REPORT")
        print("=" * 60)

        print(f"\n ✅ Passed: {len(self.passed)}")
        print(f" ⚠️ Warnings: {len(self.warnings)}")
        print(f" ❌ Errors: {len(self.errors)}")

        if self.errors:
            print("\n" + "-" * 60)
            print("ERRORS")
            print("-" * 60)
            for err in self.errors:
                print(f" ❌ [{err['type']}] {err['id']}")
                print(f"    {err['message']}")
                if 'expected' in err:
                    print(f"    Expected: {err['expected']}")
                if 'design_spec' in err:
                    print(f"    Design: {err['design_spec']}")

        if self.warnings:
            print("\n" + "-" * 60)
            print("WARNINGS")
            print("-" * 60)
            for warn in self.warnings:
                print(f" ⚠️ [{warn['type']}] {warn['id']}")
                print(f"    {warn['message']}")
                if 'expected' in warn:
                    print(f"    Expected: {warn['expected']}")

        print("\n" + "=" * 60)
        if self.errors:
            print(" ❌ VALIDATION FAILED")
        else:
            print(" ✅ VALIDATION PASSED")
        print("=" * 60 + "\n")

    def generate_checklist(self, output_path: str):
        """Generate a markdown checklist of implementation status."""
        lines = [
            "# Implementation Checklist",
            "",
            f"Generated: {datetime.datetime.now().isoformat()[:19]}",
            "",
            "## Components",
            ""
        ]

        for comp in self.design.get('components', []):
            comp_id = comp.get('id')
            comp_name = comp.get('name')

            # Check if this component has errors or warnings
            has_error = any(e['id'] == comp_id for e in self.errors)
            has_warning = any(w['id'] == comp_id for w in self.warnings)

            if has_error:
                status = "❌"
            elif has_warning:
                status = "⚠️"
            else:
                status = "✅"

            lines.append(f"- [{status}] {comp_name} (`{comp_id}`)")

            # List props
            for prop in comp.get('props', []):
                prop_errors = [e for e in self.errors if e['id'] == comp_id and prop['name'] in e.get('message', '')]
                prop_status = "❌" if prop_errors else "✅"
                lines.append(f" - [{prop_status}] prop: `{prop['name']}`: {prop.get('type', 'any')}")

            # List events
            for event in comp.get('events', []):
                event_warnings = [w for w in self.warnings if w['id'] == comp_id and event['name'] in w.get('message', '')]
                event_status = "⚠️" if event_warnings else "✅"
                lines.append(f" - [{event_status}] event: `{event['name']}`")

        lines.extend(["", "## API Endpoints", ""])

        for api in self.design.get('api_endpoints', []):
            api_id = api.get('id')
            method = api.get('method')
            path = api.get('path')

            has_error = any(e['id'] == api_id for e in self.errors)
            has_warning = any(w['id'] == api_id for w in self.warnings)

            if has_error:
                status = "❌"
            elif has_warning:
                status = "⚠️"
            else:
                status = "✅"

            lines.append(f"- [{status}] `{method}` {path}")

        lines.extend(["", "## Data Models", ""])

        for model in self.design.get('data_models', []):
            model_id = model.get('id')
            model_name = model.get('name')

            has_error = any(e['id'] == model_id for e in self.errors)
            has_warning = any(w['id'] == model_id for w in self.warnings)

            if has_error:
                status = "❌"
            elif has_warning:
                status = "⚠️"
            else:
                status = "✅"

            lines.append(f"- [{status}] {model_name} (`{model_id}`)")

        # Write file
        os.makedirs(os.path.dirname(output_path) or '.', exist_ok=True)
        with open(output_path, 'w') as f:
            f.write('\n'.join(lines))

        print(f"\nChecklist saved to: {output_path}")
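

# For reference, generate_checklist() writes markdown along these lines
# (illustrative excerpt; the names and ids are hypothetical):
#
#   # Implementation Checklist
#
#   Generated: 2024-01-01T00:00:00
#
#   ## Components
#
#   - [✅] LoginForm (`comp-login-form`)
#    - [✅] prop: `onSubmit`: (values: unknown) => void
#    - [⚠️] event: `onCancel`
#
#   ## API Endpoints
#
#   - [✅] `GET` /api/users/:id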


def main():
    parser = argparse.ArgumentParser(description='Validate implementation against design')
    parser.add_argument('design_path', help='Path to design_document.yml')
    parser.add_argument('--project-root', default='.', help='Project root directory')
    parser.add_argument('--checklist', help='Output path for markdown checklist')

    args = parser.parse_args()

    validator = ImplementationValidator(args.design_path, args.project_root)
    validator.validate_all()
    validator.print_report()

    if args.checklist:
        validator.generate_checklist(args.checklist)

    sys.exit(0 if len(validator.errors) == 0 else 1)


if __name__ == '__main__':
    main()
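

# Example invocations (illustrative; the script and file names here are
# hypothetical):
#
#   python validate_implementation.py design/design_document.yml
#   python validate_implementation.py design/design_document.yml \
#       --project-root ./web --checklist docs/implementation-checklist.md
#
# The process exits 0 when no errors were recorded and 1 otherwise, so it can
# be used to gate a CI step.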