#!/usr/bin/env python3
"""
API Contract Validator for guardrail workflow.

Validates that frontend API calls match backend endpoint definitions:
- Endpoints exist
- HTTP methods match
- Request/response structures align

Usage:
    python3 validate_api_contract.py --manifest project_manifest.json --project-dir .
"""

import argparse
import json
import os
import re
import sys
from pathlib import Path
from typing import NamedTuple


class APICall(NamedTuple):
    """Frontend API call."""
    file_path: str
    line_number: int
    endpoint: str
    method: str
    has_body: bool
    raw_line: str


class APIEndpoint(NamedTuple):
    """Backend API endpoint."""
    file_path: str
    endpoint: str
    method: str
    has_request_body: bool
    response_type: str


class ContractIssue(NamedTuple):
    """API contract violation."""
    severity: str  # ERROR, WARNING
    category: str
    message: str
    file_path: str
    line_number: int | None
    suggestion: str


def load_manifest(manifest_path: str) -> dict | None:
    """Load the manifest file if it exists and parses as JSON."""
    if not os.path.exists(manifest_path):
        return None
    try:
        with open(manifest_path) as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return None


def find_frontend_files(project_dir: str) -> list[str]:
    """Find frontend source files."""
    frontend_patterns = [
        'app/**/*.tsx', 'app/**/*.ts',
        'src/**/*.tsx', 'src/**/*.ts',
        'pages/**/*.tsx', 'pages/**/*.ts',
        'components/**/*.tsx', 'components/**/*.ts',
        'hooks/**/*.ts', 'hooks/**/*.tsx',
        'lib/**/*.ts', 'lib/**/*.tsx',
        'services/**/*.ts', 'services/**/*.tsx',
    ]

    # Directories to exclude. Matched as whole path segments below so that
    # names like 'rapid' or 'distance' are not excluded by accident.
    exclude_dirs = {'node_modules', '.next', 'dist', 'build'}

    files = []
    for pattern in frontend_patterns:
        base_dir = pattern.split('/')[0]
        search_dir = Path(project_dir) / base_dir
        if search_dir.exists():
            for file_path in search_dir.rglob('*.ts*'):
                if not exclude_dirs.intersection(file_path.parts):
                    # Skip API route files; those are backend code
                    if 'api' not in file_path.parts:
                        files.append(str(file_path))

    return list(set(files))


def find_backend_files(project_dir: str) -> list[str]:
    """Find backend API route files."""
    backend_patterns = [
        'app/api/**/*.ts', 'app/api/**/*.tsx',
        'pages/api/**/*.ts', 'pages/api/**/*.tsx',
        'api/**/*.ts',
        'src/api/**/*.ts',
        'server/**/*.ts',
        'routes/**/*.ts',
    ]

    files = []
    for pattern in backend_patterns:
        # Walk down the non-glob prefix of the pattern (e.g. 'app/api')
        base_parts = pattern.split('/')
        search_dir = Path(project_dir)
        for part in base_parts[:-1]:
            if '*' not in part:
                search_dir = search_dir / part

        if search_dir.exists():
            for file_path in search_dir.rglob('*.ts*'):
                path_str = str(file_path)
                if 'node_modules' not in path_str:
                    files.append(path_str)

    return list(set(files))


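# Illustrative inputs (assumed, not from a real project) and what the
# extractor below derives from them:
#   fetch('/api/users', { method: 'POST', body: JSON.stringify(data) })
#     -> endpoint '/api/users', method POST, has_body True
#   axios.get('/api/orders/123')
#     -> endpoint '/api/orders/123', method GET, has_body False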
def extract_frontend_api_calls(file_path: str) -> list[APICall]:
    """Extract API calls from a frontend file."""
    calls = []

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
        lines = content.split('\n')
    except (IOError, UnicodeDecodeError):
        return []

    # Patterns for API calls
    patterns = [
        # fetch('/api/...', { method: 'POST', body: ... })
        (r'''fetch\s*\(\s*['"](/api/[^'"]+)['"]''', 'fetch'),
        # axios.get('/api/...'), axios.post('/api/...', data)
        (r'''axios\.(get|post|put|patch|delete)\s*\(\s*['"](/api/[^'"]+)['"]''', 'axios'),
        # api.get('/users'), api.post('/users', data)
        (r'''api\.(get|post|put|patch|delete)\s*\(\s*['"]([^'"]+)['"]''', 'api_client'),
        # useSWR('/api/...'), useSWR(() => '/api/...')
        (r'''useSWR\s*\(\s*['"](/api/[^'"]+)['"]''', 'swr'),
        # fetch(`/api/...`) with a template literal; quoted fetch calls are
        # already covered by the 'fetch' pattern above
        (r'''fetch\s*\(\s*`(/api/[^`]+)`''', 'fetch_template'),
    ]

    for line_num, line in enumerate(lines, 1):
        for pattern, call_type in patterns:
            matches = re.finditer(pattern, line, re.IGNORECASE)
            for match in matches:
                groups = match.groups()

                if call_type in ('fetch', 'swr', 'fetch_template'):
                    endpoint = groups[0]
                    # Try to detect the method from the options object
                    method = 'GET'
                    if 'method' in line.lower():
                        method_match = re.search(r'''method:\s*['"](\w+)['"]''', line, re.IGNORECASE)
                        if method_match:
                            method = method_match.group(1).upper()
                    has_body = 'body:' in line.lower() or 'body=' in line.lower()

                elif call_type in ('axios', 'api_client'):
                    method = groups[0].upper()
                    endpoint = groups[1]
                    # POST, PUT, PATCH typically have a body
                    has_body = method in ['POST', 'PUT', 'PATCH']
                else:
                    continue

                # Normalize endpoint
                if not endpoint.startswith('/api/'):
                    endpoint = f'/api/{endpoint.lstrip("/")}'

                calls.append(APICall(
                    file_path=file_path,
                    line_number=line_num,
                    endpoint=endpoint,
                    method=method,
                    has_body=has_body,
                    raw_line=line.strip()
                ))

    return calls


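# Illustrative route files (assumed Next.js layouts) and what the extractor
# below derives from them:
#   app/api/users/[id]/route.ts exporting GET and DELETE
#     -> endpoint '/api/users/:id' with methods GET and DELETE
#   pages/api/orders.ts checking req.method === 'POST'
#     -> endpoint '/api/orders' with method POST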
def extract_backend_endpoints(file_path: str) -> list[APIEndpoint]:
    """Extract API endpoints from a backend file."""
    endpoints = []

    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
    except (IOError, UnicodeDecodeError):
        return []

    # Determine endpoint from file path (Next.js App Router / Pages Router)
    if '/app/api/' in file_path:
        # App Router: app/api/users/route.ts -> /api/users
        api_path = re.search(r'/app/api/(.+?)/(route|page)\.(ts|tsx|js|jsx)', file_path)
        if api_path:
            endpoint = f'/api/{api_path.group(1)}'
        else:
            api_path = re.search(r'/app/api/(.+?)\.(ts|tsx|js|jsx)', file_path)
            if api_path:
                endpoint = f'/api/{api_path.group(1)}'
            else:
                endpoint = '/api/unknown'
    elif '/pages/api/' in file_path:
        # Pages Router: pages/api/users.ts -> /api/users
        api_path = re.search(r'/pages/api/(.+?)\.(ts|tsx|js|jsx)', file_path)
        if api_path:
            endpoint = f'/api/{api_path.group(1)}'
        else:
            endpoint = '/api/unknown'
    else:
        endpoint = '/api/unknown'

    # Clean up dynamic segments: [id] -> :id
    endpoint = re.sub(r'\[(\w+)\]', r':\1', endpoint)

    # Detect HTTP methods
    # Next.js App Router exports: GET, POST, PUT, DELETE, PATCH
    app_router_methods = re.findall(
        r'export\s+(?:async\s+)?function\s+(GET|POST|PUT|DELETE|PATCH|HEAD|OPTIONS)',
        content
    )

    # Pages Router: req.method checks
    pages_router_methods = re.findall(
        r'''req\.method\s*===?\s*['"](\w+)['"]''',
        content
    )

    # Express-style: router.get, router.post, app.get, app.post
    express_methods = re.findall(
        r'''(?:router|app)\.(get|post|put|patch|delete)\s*\(''',
        content,
        re.IGNORECASE
    )

    methods = set()
    methods.update(m.upper() for m in app_router_methods)
    methods.update(m.upper() for m in pages_router_methods)
    methods.update(m.upper() for m in express_methods)

    # Default to GET if no methods detected
    if not methods:
        methods = {'GET'}

    # Detect request body handling
    has_body_patterns = [
        r'request\.json\(\)',
        r'req\.body',
        r'await\s+request\.json',
        r'JSON\.parse',
        r'body\s*:',
    ]
    has_request_body = any(re.search(p, content) for p in has_body_patterns)

    # Detect response type
    response_type = 'json'  # default
    if 'NextResponse.json' in content or 'res.json' in content:
        response_type = 'json'
    elif 'new Response(' in content:
        response_type = 'response'
    elif 'res.send' in content:
        response_type = 'text'

    for method in methods:
        endpoints.append(APIEndpoint(
            file_path=file_path,
            endpoint=endpoint,
            method=method,
            has_request_body=has_request_body,
            response_type=response_type
        ))

    return endpoints


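# Illustrative normalizations performed by the helper below:
#   '/api/users/${userId}?page=2' -> '/api/users/:param'
#   '/api/users/:id/'             -> '/api/users/:param'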
def normalize_endpoint(endpoint: str) -> str:
    """Normalize endpoint for comparison."""
    # Remove query params
    endpoint = endpoint.split('?')[0]
    # Normalize dynamic segments
    endpoint = re.sub(r':\w+', ':param', endpoint)
    endpoint = re.sub(r'\$\{[^}]+\}', ':param', endpoint)
    # Remove trailing slash
    endpoint = endpoint.rstrip('/')
    return endpoint.lower()


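# Illustrative match: '/api/users/123' matches '/api/users/:id' because the
# dynamic segment normalizes to ':param', which is compared as the regex
# '[^/]+' against the literal segment '123'.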
def match_endpoints(call_endpoint: str, api_endpoint: str) -> bool:
    """Check if frontend call matches backend endpoint."""
    norm_call = normalize_endpoint(call_endpoint)
    norm_api = normalize_endpoint(api_endpoint)

    # Exact match
    if norm_call == norm_api:
        return True

    # Pattern match with dynamic segments
    api_pattern = re.sub(r':param', r'[^/]+', norm_api)
    if re.match(f'^{api_pattern}$', norm_call):
        return True

    return False


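# The validator below emits these issue categories:
#   ENDPOINT_NOT_FOUND (ERROR)   - no backend endpoint matches a frontend call
#   METHOD_MISMATCH    (ERROR)   - endpoint exists but not for the method used
#   BODY_MISMATCH      (WARNING) - frontend sends a body the backend may ignore
#   MISSING_BODY       (WARNING) - backend reads a body the frontend may not send
#   UNUSED_ENDPOINT    (WARNING) - backend endpoint never called by the frontend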
def validate_api_contract(
    project_dir: str,
    manifest: dict | None = None
) -> tuple[list[ContractIssue], dict]:
    """Validate API contract between frontend and backend."""
    # Note: manifest is currently unused; it is accepted to keep the CLI
    # interface stable.
    issues = []
    stats = {
        'frontend_calls': 0,
        'backend_endpoints': 0,
        'matched': 0,
        'unmatched_calls': 0,
        'method_mismatches': 0,
        'body_mismatches': 0,
    }

    # Find files
    frontend_files = find_frontend_files(project_dir)
    backend_files = find_backend_files(project_dir)

    # Extract API calls and endpoints
    all_calls: list[APICall] = []
    all_endpoints: list[APIEndpoint] = []

    for file in frontend_files:
        all_calls.extend(extract_frontend_api_calls(file))

    for file in backend_files:
        all_endpoints.extend(extract_backend_endpoints(file))

    stats['frontend_calls'] = len(all_calls)
    stats['backend_endpoints'] = len(all_endpoints)

    # Validate each frontend call
    for call in all_calls:
        matched = False

        for ep in all_endpoints:
            if match_endpoints(call.endpoint, ep.endpoint):
                matched = True

                # Check method match
                if call.method != ep.method:
                    # Check whether any path-matching endpoint supports this method
                    endpoint_methods = [e.method for e in all_endpoints
                                        if match_endpoints(call.endpoint, e.endpoint)]
                    if call.method not in endpoint_methods:
                        supported = ', '.join(sorted(set(endpoint_methods)))
                        issues.append(ContractIssue(
                            severity='ERROR',
                            category='METHOD_MISMATCH',
                            message=f"Frontend calls {call.method} {call.endpoint} but backend only supports {supported}",
                            file_path=call.file_path,
                            line_number=call.line_number,
                            suggestion=f"Change method to one of: {supported}"
                        ))
                        stats['method_mismatches'] += 1
                        # No endpoint supports this method; stop here so the
                        # same issue is not reported once per endpoint
                        break
                    continue

                # Check body requirements
                if call.has_body and not ep.has_request_body:
                    issues.append(ContractIssue(
                        severity='WARNING',
                        category='BODY_MISMATCH',
                        message=f"Frontend sends body to {call.endpoint} but backend may not process it",
                        file_path=call.file_path,
                        line_number=call.line_number,
                        suggestion="Verify backend handles request body or remove body from frontend call"
                    ))
                    stats['body_mismatches'] += 1

                if not call.has_body and ep.has_request_body and ep.method in ['POST', 'PUT', 'PATCH']:
                    issues.append(ContractIssue(
                        severity='WARNING',
                        category='MISSING_BODY',
                        message=f"Backend expects body for {call.method} {call.endpoint} but frontend may not send it",
                        file_path=call.file_path,
                        line_number=call.line_number,
                        suggestion="Add request body to frontend call"
                    ))

                stats['matched'] += 1
                break

        if not matched:
            issues.append(ContractIssue(
                severity='ERROR',
                category='ENDPOINT_NOT_FOUND',
                message=f"Frontend calls {call.method} {call.endpoint} but no matching backend endpoint found",
                file_path=call.file_path,
                line_number=call.line_number,
                suggestion=f"Create backend endpoint at {call.endpoint} or fix the frontend URL"
            ))
            stats['unmatched_calls'] += 1

    # Check for unused backend endpoints
    called_endpoints = set()
    for call in all_calls:
        called_endpoints.add((normalize_endpoint(call.endpoint), call.method))

    for ep in all_endpoints:
        key = (normalize_endpoint(ep.endpoint), ep.method)
        if key not in called_endpoints:
            # Check if any call matches with a different method
            matching_calls = [c for c in all_calls
                              if match_endpoints(c.endpoint, ep.endpoint)]
            if not matching_calls:
                issues.append(ContractIssue(
                    severity='WARNING',
                    category='UNUSED_ENDPOINT',
                    message=f"Backend endpoint {ep.method} {ep.endpoint} is not called by frontend",
                    file_path=ep.file_path,
                    line_number=None,
                    suggestion="Verify endpoint is needed or remove unused code"
                ))

    return issues, stats


def format_report(issues: list[ContractIssue], stats: dict) -> str:
    """Format validation report."""
    lines = []

    lines.append("")
    lines.append("=" * 70)
    lines.append(" API CONTRACT VALIDATION REPORT")
    lines.append("=" * 70)
    lines.append("")

    # Stats
    lines.append("SUMMARY")
    lines.append("-" * 70)
    lines.append(f" Frontend API calls found: {stats['frontend_calls']}")
    lines.append(f" Backend endpoints found: {stats['backend_endpoints']}")
    lines.append(f" Matched calls: {stats['matched']}")
    lines.append(f" Unmatched calls: {stats['unmatched_calls']}")
    lines.append(f" Method mismatches: {stats['method_mismatches']}")
    lines.append(f" Body mismatches: {stats['body_mismatches']}")
    lines.append("")

    # Issues by severity
    errors = [i for i in issues if i.severity == 'ERROR']
    warnings = [i for i in issues if i.severity == 'WARNING']

    if errors:
        lines.append("ERRORS (must fix)")
        lines.append("-" * 70)
        for i, issue in enumerate(errors, 1):
            lines.append(f" {i}. [{issue.category}] {issue.message}")
            lines.append(f"    File: {issue.file_path}:{issue.line_number or '?'}")
            lines.append(f"    Fix: {issue.suggestion}")
        lines.append("")

    if warnings:
        lines.append("WARNINGS (review)")
        lines.append("-" * 70)
        for i, issue in enumerate(warnings, 1):
            lines.append(f" {i}. [{issue.category}] {issue.message}")
            lines.append(f"    File: {issue.file_path}:{issue.line_number or '?'}")
            lines.append(f"    Fix: {issue.suggestion}")
        lines.append("")

    # Result
    lines.append("=" * 70)
    if not errors:
        lines.append(" RESULT: PASS (no errors)")
    else:
        lines.append(f" RESULT: FAIL ({len(errors)} errors)")
    lines.append("=" * 70)

    return "\n".join(lines)


def main():
    parser = argparse.ArgumentParser(description="Validate API contract")
    parser.add_argument("--manifest", help="Path to project_manifest.json")
    parser.add_argument("--project-dir", default=".", help="Project directory")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    parser.add_argument("--strict", action="store_true", help="Fail on warnings too")
    args = parser.parse_args()

    manifest = None
    if args.manifest:
        manifest = load_manifest(args.manifest)

    issues, stats = validate_api_contract(args.project_dir, manifest)

    if args.json:
        output = {
            'stats': stats,
            'issues': [
                {
                    'severity': i.severity,
                    'category': i.category,
                    'message': i.message,
                    'file_path': i.file_path,
                    'line_number': i.line_number,
                    'suggestion': i.suggestion
                }
                for i in issues
            ],
            'result': 'PASS' if not any(i.severity == 'ERROR' for i in issues) else 'FAIL'
        }
        print(json.dumps(output, indent=2))
    else:
        print(format_report(issues, stats))

    # Exit code
    errors = [i for i in issues if i.severity == 'ERROR']
    warnings = [i for i in issues if i.severity == 'WARNING']

    if errors:
        return 1
    if args.strict and warnings:
        return 1
    return 0


if __name__ == "__main__":
    sys.exit(main())