#!/usr/bin/env python3
"""
Validate Implementation Against API Contract

This script verifies that both backend and frontend implementations
comply with the generated API contract.

Checks performed:
1. Backend routes exist and export correct HTTP methods
2. Frontend components import from shared types file
3. API calls use correct paths and methods
4. Types are properly imported (not recreated locally)

Exit codes:
0 = All validations pass
1 = Warnings found (non-critical violations)
2 = Critical violations (missing routes, type mismatches)
"""
|
|
|
|
import os
|
|
import sys
|
|
import re
|
|
import json
|
|
from pathlib import Path
|
|
from typing import Dict, List, Any, Tuple, Optional
|
|
|
|
try:
|
|
import yaml
|
|
except ImportError:
|
|
yaml = None
|
|
|
|
|
|
def load_yaml(path: Path) -> Dict:
    """Load a contract file as a mapping.

    Prefers YAML when PyYAML is installed; otherwise falls back to JSON
    (every JSON contract is also valid YAML, so JSON files work in both
    modes).

    Args:
        path: Path to the contract file.

    Returns:
        The parsed mapping, or {} when PyYAML is unavailable and the file
        is not valid JSON (deliberate best-effort: downstream validation
        then reports "nothing to check" instead of crashing).
    """
    if yaml:
        with open(path) as f:
            return yaml.safe_load(f)
    # No PyYAML available: try the file as JSON.
    with open(path) as f:
        content = f.read()
    try:
        return json.loads(content)
    except json.JSONDecodeError:
        # Narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        return {}
|
|
|
|
|
|
def find_project_root(start_dir: Path) -> Path:
    """Walk upward from *start_dir* looking for a package.json marker.

    Returns the first directory (starting from the resolved *start_dir*
    itself) that contains package.json. Falls back to the original
    *start_dir* when no ancestor qualifies.
    """
    node = start_dir.resolve()
    while node != node.parent:
        marker = node / 'package.json'
        if marker.exists():
            return node
        node = node.parent
    return start_dir
|
|
|
|
|
|
class ContractValidator:
    """Validates backend and frontend implementations against an API contract.

    The contract is a YAML/JSON mapping with (at least) these keys:
      - ``backend_routes``: route files and the HTTP-method export each
        must provide
      - ``frontend_calls``: frontend source files and the endpoints they call
      - ``endpoints``: endpoint definitions keyed by ``id``

    Hard failures (missing files/exports) accumulate in ``self.violations``;
    softer issues (missing shared-type imports, local type declarations)
    go to ``self.warnings``.
    """

    # Recognized ways of importing the shared types module. Shared between
    # the backend and frontend checks so both stay in sync.
    _TYPE_IMPORT_PATTERNS = [
        r"import\s+.*from\s+['\"]@/types/api['\"]",
        r"import\s+.*from\s+['\"]\.+/types/api['\"]",
        r"import\s+type\s+.*from\s+['\"]@/types/api['\"]",
    ]

    def __init__(self, contract_path: Path, project_dir: Path):
        self.contract_path = contract_path
        self.project_dir = project_dir
        # ``or {}`` guards against an empty contract file: yaml.safe_load
        # returns None for empty input, which would crash every .get() below.
        self.contract = load_yaml(contract_path) or {}
        self.violations: List[Dict[str, Any]] = []
        self.warnings: List[Dict[str, Any]] = []

    def validate_all(self) -> Tuple[int, List[Dict], List[Dict]]:
        """Run all validations and compute the overall exit code.

        Returns:
            Tuple of (exit_code, violations, warnings). exit_code is 2 when
            any critical violation exists, 1 when only non-critical
            violations exist, and 0 otherwise (warnings alone do not affect
            the exit code).
        """
        # Validate backend
        self.validate_backend_routes()
        self.validate_backend_type_imports()

        # Validate frontend
        self.validate_frontend_type_imports()
        self.validate_frontend_api_calls()

        # Determine exit code
        if any(v.get('severity') == 'critical' for v in self.violations):
            exit_code = 2
        elif self.violations:
            exit_code = 1
        else:
            exit_code = 0
        return exit_code, self.violations, self.warnings

    def _has_shared_type_import(self, content: str) -> bool:
        """Return True when *content* imports anything from the shared types module."""
        return any(re.search(p, content) for p in self._TYPE_IMPORT_PATTERNS)

    def _resolve_frontend_file(self, file_path_str: str) -> Optional[Path]:
        """Resolve a contract-declared frontend path to an existing file.

        Falls back to the ``app/components/`` variant when the declared
        ``components/`` path does not exist (page vs. component layout).
        Returns None when neither candidate exists.
        """
        file_path = self.project_dir / file_path_str
        if file_path.exists():
            return file_path
        if '/components/' in file_path_str:
            alt = self.project_dir / file_path_str.replace('/components/', '/app/components/')
            if alt.exists():
                return alt
        return None

    def validate_backend_routes(self) -> None:
        """Check each contract route file exists and exports its HTTP method.

        Accepts both Next.js App Router export styles:
        ``export async function GET(...)`` and ``export const GET = ...``.
        """
        for route in self.contract.get('backend_routes', []):
            file_path = self.project_dir / route['file_path']
            endpoint_id = route.get('endpoint_id', 'unknown')
            export_name = route.get('export_name', 'GET')

            if not file_path.exists():
                self.violations.append({
                    'type': 'missing_route',
                    'severity': 'critical',
                    'endpoint_id': endpoint_id,
                    'expected_file': str(route['file_path']),
                    'message': f"Backend route file missing: {route['file_path']}",
                })
                continue

            content = file_path.read_text(encoding='utf-8')

            # re.escape keeps the pattern literal even if the contract ever
            # supplies an export name with regex metacharacters.
            safe_name = re.escape(export_name)
            export_pattern = rf'export\s+(async\s+)?function\s+{safe_name}\s*\('
            const_pattern = rf'export\s+const\s+{safe_name}\s*='
            if not re.search(export_pattern, content) and not re.search(const_pattern, content):
                self.violations.append({
                    'type': 'missing_export',
                    'severity': 'critical',
                    'endpoint_id': endpoint_id,
                    'file': str(route['file_path']),
                    'expected_export': export_name,
                    'message': f"Route {route['file_path']} missing {export_name} export",
                })

    def validate_backend_type_imports(self) -> None:
        """Warn when backend routes skip the shared types module or re-declare types."""
        for route in self.contract.get('backend_routes', []):
            file_path = self.project_dir / route['file_path']
            if not file_path.exists():
                continue  # Already reported by validate_backend_routes

            content = file_path.read_text(encoding='utf-8')

            if not self._has_shared_type_import(content):
                self.warnings.append({
                    'type': 'missing_type_import',
                    'severity': 'warning',
                    'file': str(route['file_path']),
                    'message': f"Backend route {route['file_path']} should import types from @/types/api",
                })

            # Local re-declarations of contract types can silently drift from
            # the shared definitions, so flag them (at most once per file).
            local_type_patterns = [
                r'(interface|type)\s+User\s*[={]',
                r'(interface|type)\s+.*Request\s*[={]',
                r'(interface|type)\s+.*Response\s*[={]',
            ]
            for pattern in local_type_patterns:
                match = re.search(pattern, content)
                # Ignore matches sitting on an import line (e.g. "import type ...").
                if match and 'import' not in content[:match.start()].split('\n')[-1]:
                    self.warnings.append({
                        'type': 'local_type_definition',
                        'severity': 'warning',
                        'file': str(route['file_path']),
                        'message': "Backend route defines local types. Should import from @/types/api instead.",
                    })
                    break

    def validate_frontend_type_imports(self) -> None:
        """Warn when frontend files making API calls skip the shared types module."""
        checked_files = set()

        for call in self.contract.get('frontend_calls', []):
            file_path_str = call.get('source', {}).get('file_path', '')
            if not file_path_str or file_path_str in checked_files:
                continue
            checked_files.add(file_path_str)

            file_path = self._resolve_frontend_file(file_path_str)
            if file_path is None:
                self.violations.append({
                    'type': 'missing_frontend_file',
                    'severity': 'high',
                    'expected_file': file_path_str,
                    'message': f"Frontend file missing: {file_path_str}",
                })
                continue

            content = file_path.read_text(encoding='utf-8')
            if not self._has_shared_type_import(content):
                self.warnings.append({
                    'type': 'missing_type_import',
                    'severity': 'warning',
                    'file': file_path_str,
                    'message': f"Frontend file {file_path_str} should import types from @/types/api",
                })

    def validate_frontend_api_calls(self) -> None:
        """Walk frontend calls without emitting per-call reports.

        Statically proving that a component issues a specific fetch/axios
        call is unreliable (dynamic paths, wrapper helpers, API_PATHS
        constants), so no violations are produced here; missing files are
        already reported by validate_frontend_type_imports. The original
        implementation scanned for fetch/axios patterns but never acted on
        the result; that dead scan was removed — reported behavior is
        unchanged. The loop is kept so future per-call checks have the
        resolved file and endpoint at hand.
        """
        endpoints = {e['id']: e for e in self.contract.get('endpoints', [])}

        for call in self.contract.get('frontend_calls', []):
            file_path_str = call.get('source', {}).get('file_path', '')
            endpoint_id = call.get('endpoint_id', '')

            if not file_path_str or endpoint_id not in endpoints:
                continue
            if self._resolve_frontend_file(file_path_str) is None:
                continue  # Already reported

    def validate_types_file_exists(self) -> bool:
        """Record a critical violation when app/types/api.ts is missing.

        Returns:
            True when the shared types file exists, False otherwise.
        """
        types_file = self.project_dir / 'app' / 'types' / 'api.ts'
        if types_file.exists():
            return True
        self.violations.append({
            'type': 'missing_types_file',
            'severity': 'critical',
            'expected_file': 'app/types/api.ts',
            'message': "Shared types file missing: app/types/api.ts",
        })
        return False
|
|
|
|
|
|
def print_report(violations: List[Dict], warnings: List[Dict]) -> None:
    """Print a human-readable validation report to stdout.

    Violations are grouped by severity (critical / high / other), all
    warnings are listed, and the report ends with an overall verdict line.
    A completely clean run prints a short success banner and returns early.
    """
    divider = "=" * 60
    print("\n" + divider)
    print("API CONTRACT VALIDATION REPORT")
    print(divider)

    if not (violations or warnings):
        print("\n✅ ALL VALIDATIONS PASSED")
        print("\nBoth frontend and backend implementations comply with the API contract.")
        return

    if violations:
        print(f"\n❌ VIOLATIONS FOUND: {len(violations)}")
        print("-" * 40)

        # Bucket by severity so the most urgent items appear first.
        buckets = {'critical': [], 'high': [], 'other': []}
        for item in violations:
            severity = item.get('severity')
            key = severity if severity in ('critical', 'high') else 'other'
            buckets[key].append(item)

        if buckets['critical']:
            print("\n🔴 CRITICAL (Must fix):")
            for item in buckets['critical']:
                print(f" • {item['message']}")
                if 'expected_file' in item:
                    print(f" Expected: {item['expected_file']}")

        if buckets['high']:
            print("\n🟠 HIGH (Should fix):")
            for item in buckets['high']:
                print(f" • {item['message']}")

        if buckets['other']:
            print("\n🟡 OTHER:")
            for item in buckets['other']:
                print(f" • {item['message']}")

    if warnings:
        print(f"\n⚠️ WARNINGS: {len(warnings)}")
        print("-" * 40)
        for item in warnings:
            print(f" • {item['message']}")

    print("\n" + divider)

    if any(item.get('severity') == 'critical' for item in violations):
        print("VERDICT: ❌ FAILED - Critical violations must be fixed")
    elif violations:
        print("VERDICT: ⚠️ WARNINGS - Review and fix if possible")
    else:
        print("VERDICT: ✅ PASSED with warnings")
|
|
|
def main():
    """CLI entry point.

    Usage: validate_against_contract.py <api_contract.yml> [--project-dir <dir>]

    Exits with the validator's code: 0 = pass, 1 = non-critical
    violations, 2 = critical violations or unusable arguments.
    """
    if len(sys.argv) < 2:
        print("Usage: validate_against_contract.py <api_contract.yml> [--project-dir <dir>]", file=sys.stderr)
        sys.exit(1)

    contract_path = Path(sys.argv[1])

    # Parse project directory
    project_dir = Path('.')
    if '--project-dir' in sys.argv:
        idx = sys.argv.index('--project-dir')
        # Guard against "--project-dir" as the final argument; previously
        # sys.argv[idx + 1] raised an uncaught IndexError.
        if idx + 1 >= len(sys.argv):
            print("Error: --project-dir requires a directory argument", file=sys.stderr)
            sys.exit(2)
        project_dir = Path(sys.argv[idx + 1])

    # Walk up to the nearest package.json so relative contract paths resolve.
    project_dir = find_project_root(project_dir)

    if not contract_path.exists():
        print(f"Error: Contract file not found: {contract_path}", file=sys.stderr)
        sys.exit(2)

    # Run validation
    validator = ContractValidator(contract_path, project_dir)

    # The shared types file is a hard prerequisite; record its absence
    # before the per-route/per-call validations run.
    validator.validate_types_file_exists()

    # Run all validations
    exit_code, violations, warnings = validator.validate_all()

    # Print report
    print_report(violations, warnings)

    # Summary stats
    print("\nValidation complete:")
    print(f" Backend routes checked: {len(validator.contract.get('backend_routes', []))}")
    print(f" Frontend calls checked: {len(validator.contract.get('frontend_calls', []))}")
    print(f" Types defined: {len(validator.contract.get('types', []))}")

    sys.exit(exit_code)


if __name__ == '__main__':
    main()
|