#!/usr/bin/env python3
"""Workflow state management for automated orchestration with approval gates."""

import argparse
import json
import os
import shutil
import sys
from datetime import datetime
from pathlib import Path
from typing import Optional

# Try to import yaml, fall back to basic parsing if not available
try:
    import yaml
    HAS_YAML = True
except ImportError:
    HAS_YAML = False


# ============================================================================
# YAML Helpers
# ============================================================================

def load_yaml(filepath: str) -> dict:
    """Load a YAML file, returning an empty dict if it is missing or empty."""
    if not os.path.exists(filepath):
        return {}
    with open(filepath, 'r') as f:
        content = f.read()
    if not content.strip():
        return {}

    if HAS_YAML:
        return yaml.safe_load(content) or {}

    # Simple fallback parser: handles top-level scalars and lists of strings only
    result = {}
    current_key = None
    current_list = None

    for line in content.split('\n'):
        line = line.rstrip()
        if not line or line.startswith('#'):
            continue

        if line.startswith('  - '):
            if current_list is not None:
                value = line[4:].strip()
                # Handle quoted strings
                if (value.startswith('"') and value.endswith('"')) or \
                   (value.startswith("'") and value.endswith("'")):
                    value = value[1:-1]
                current_list.append(value)
            continue

        if ':' in line and not line.startswith(' '):
            key, _, value = line.partition(':')
            key = key.strip()
            value = value.strip()

            if value == '[]':
                result[key] = []
                current_list = result[key]
            elif value == '{}':
                result[key] = {}
                current_list = None
            elif value == 'null' or value == '~':
                result[key] = None
                current_list = None
            elif value == 'true':
                result[key] = True
                current_list = None
            elif value == 'false':
                result[key] = False
                current_list = None
            elif value.isdigit():
                result[key] = int(value)
                current_list = None
            elif value:
                # Handle quoted strings
                if (value.startswith('"') and value.endswith('"')) or \
                   (value.startswith("'") and value.endswith("'")):
                    value = value[1:-1]
                result[key] = value
                current_list = None
            else:
                result[key] = []
                current_list = result[key]
            current_key = key

    return result


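# Illustrative sketch (not part of the original API): the fallback parser above
# only understands a flat subset of YAML. A file such as
#
#   active_version: v1
#   count: 3
#   tags:
#     - "alpha"
#     - beta
#
# would load as {'active_version': 'v1', 'count': 3, 'tags': ['alpha', 'beta']}.
# Nested mappings are not parsed by the fallback; install PyYAML for full support.

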
def save_yaml(filepath: str, data: dict):
    """Save data to a YAML file, creating the parent directory if needed."""
    dirname = os.path.dirname(filepath)
    if dirname:
        os.makedirs(dirname, exist_ok=True)

    if HAS_YAML:
        with open(filepath, 'w') as f:
            yaml.dump(data, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
    else:
        # Simple YAML writer
        def write_value(value, indent=0):
            prefix = '  ' * indent
            lines = []
            if isinstance(value, dict):
                for k, v in value.items():
                    if isinstance(v, (dict, list)) and v:
                        lines.append(f"{prefix}{k}:")
                        lines.extend(write_value(v, indent + 1))
                    elif isinstance(v, list):
                        lines.append(f"{prefix}{k}: []")
                    else:
                        lines.append(f"{prefix}{k}: {v}")
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, dict):
                        lines.append(f"{prefix}-")
                        for k, v in item.items():
                            lines.append(f"{prefix}  {k}: {v}")
                    else:
                        lines.append(f"{prefix}- {item}")
            return lines

        lines = write_value(data)
        with open(filepath, 'w') as f:
            f.write('\n'.join(lines))


# ============================================================================
# Workflow State Management
# ============================================================================

PHASES = [
    'INITIALIZING',
    'DESIGNING',
    'AWAITING_DESIGN_APPROVAL',
    'DESIGN_APPROVED',
    'DESIGN_REJECTED',
    'IMPLEMENTING',
    'INTEGRATING',      # New phase for connecting features to existing project
    'REVIEWING',
    'SECURITY_REVIEW',  # Security audit phase
    'AWAITING_IMPL_APPROVAL',
    'IMPL_APPROVED',
    'IMPL_REJECTED',
    'COMPLETING',
    'COMPLETED',
    'PAUSED',
    'FAILED'
]

VALID_TRANSITIONS = {
    'INITIALIZING': ['DESIGNING', 'FAILED'],
    'DESIGNING': ['AWAITING_DESIGN_APPROVAL', 'FAILED'],
    'AWAITING_DESIGN_APPROVAL': ['DESIGN_APPROVED', 'DESIGN_REJECTED', 'PAUSED'],
    'DESIGN_APPROVED': ['IMPLEMENTING', 'FAILED'],
    'DESIGN_REJECTED': ['DESIGNING'],
    'IMPLEMENTING': ['INTEGRATING', 'FAILED', 'PAUSED'],  # Now goes to INTEGRATING first
    'INTEGRATING': ['REVIEWING', 'IMPLEMENTING', 'FAILED'],  # Can go back to fix integration
    'REVIEWING': ['SECURITY_REVIEW', 'IMPLEMENTING', 'FAILED'],  # Must pass through security
    'SECURITY_REVIEW': ['AWAITING_IMPL_APPROVAL', 'IMPLEMENTING', 'FAILED'],  # Can go back to fix
    'AWAITING_IMPL_APPROVAL': ['IMPL_APPROVED', 'IMPL_REJECTED', 'PAUSED'],
    'IMPL_APPROVED': ['COMPLETING', 'FAILED'],
    'IMPL_REJECTED': ['IMPLEMENTING'],
    'COMPLETING': ['COMPLETED', 'FAILED'],
    'COMPLETED': [],
    'PAUSED': PHASES,  # Can resume to any phase
    'FAILED': ['INITIALIZING', 'DESIGNING', 'IMPLEMENTING']  # Can retry
}


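# Happy-path phase sequence implied by VALID_TRANSITIONS above (illustrative
# comment, not used by the code):
#
#   INITIALIZING -> DESIGNING -> AWAITING_DESIGN_APPROVAL -> DESIGN_APPROVED
#     -> IMPLEMENTING -> INTEGRATING -> REVIEWING -> SECURITY_REVIEW
#     -> AWAITING_IMPL_APPROVAL -> IMPL_APPROVED -> COMPLETING -> COMPLETED
#
# Rejection at either gate loops back (DESIGN_REJECTED -> DESIGNING,
# IMPL_REJECTED -> IMPLEMENTING) until the gate is approved.

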
def get_workflow_dir() -> Path:
    """Get the .workflow directory path."""
    return Path('.workflow')


def get_current_state_path() -> Path:
    """Get the current workflow state file path."""
    return get_workflow_dir() / 'current.yml'


def get_history_dir() -> Path:
    """Get the workflow history directory."""
    return get_workflow_dir() / 'history'


def create_workflow(feature: str) -> dict:
    """Create a new workflow state."""
    now = datetime.now()
    workflow_id = f"workflow_{now.strftime('%Y%m%d_%H%M%S')}"

    state = {
        'id': workflow_id,
        'feature': feature,
        'current_phase': 'INITIALIZING',
        'gates': {
            'design_approval': {
                'status': 'pending',
                'approved_at': None,
                'approved_by': None,
                'rejection_reason': None,
                'revision_count': 0
            },
            'implementation_approval': {
                'status': 'pending',
                'approved_at': None,
                'approved_by': None,
                'rejection_reason': None,
                'revision_count': 0
            }
        },
        'progress': {
            'entities_designed': 0,
            'tasks_created': 0,
            'tasks_implemented': 0,
            'tasks_reviewed': 0,
            'tasks_approved': 0,
            'tasks_completed': 0
        },
        'tasks': {
            'pending': [],
            'in_progress': [],
            'review': [],
            'approved': [],
            'completed': [],
            'blocked': []
        },
        'started_at': now.isoformat(),
        'updated_at': now.isoformat(),
        'completed_at': None,
        'last_error': None,
        'resume_point': {
            'phase': 'INITIALIZING',
            'task_id': None,
            'action': 'start_workflow'
        },
        'checkpoints': []  # List of checkpoint snapshots for recovery
    }

    # Ensure directory exists
    get_workflow_dir().mkdir(exist_ok=True)
    get_history_dir().mkdir(exist_ok=True)

    # Save state
    save_yaml(str(get_current_state_path()), state)

    return state


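# Minimal programmatic sketch (illustrative only): create a workflow and print
# its status without going through the CLI.
#
#   state = create_workflow("Add playlist sharing")
#   show_status(state)
#
# create_workflow() writes the full state to .workflow/current.yml; the
# versioned session files under .workflow/versions/ are managed separately
# (see load_current_workflow/save_workflow below).

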
def load_current_workflow() -> Optional[dict]:
    """Load the current workflow state from the active version."""
    state_path = get_current_state_path()
    if not state_path.exists():
        return None

    # Read current.yml to get active version
    current = load_yaml(str(state_path))
    active_version = current.get('active_version')
    if not active_version:
        return None

    # Load the version's session.yml
    version_session_path = get_workflow_dir() / 'versions' / active_version / 'session.yml'
    if not version_session_path.exists():
        return None

    session = load_yaml(str(version_session_path))

    current_phase = session.get('current_phase', 'INITIALIZING')

    # Convert session format to state format expected by show_status
    return {
        'id': session.get('session_id', active_version),
        'feature': session.get('feature', 'Unknown'),
        'current_phase': current_phase,
        'gates': {
            'design_approval': session.get('approvals', {}).get('design', {'status': 'pending'}),
            'implementation_approval': session.get('approvals', {}).get('implementation', {'status': 'pending'})
        },
        'progress': {
            'entities_designed': session.get('summary', {}).get('entities_created', 0),
            'tasks_created': session.get('summary', {}).get('total_tasks', 0),
            'tasks_implemented': session.get('summary', {}).get('tasks_completed', 0),
            'tasks_reviewed': 0,
            'tasks_completed': session.get('summary', {}).get('tasks_completed', 0)
        },
        'tasks': {
            'pending': [],
            'in_progress': [],
            'review': [],
            'approved': [],
            'completed': session.get('task_sessions', []),
            'blocked': []
        },
        'version': active_version,
        'status': session.get('status', 'unknown'),
        'last_error': None,
        'started_at': session.get('started_at', ''),
        'updated_at': session.get('updated_at', ''),
        'completed_at': session.get('completed_at'),
        'resume_point': {
            'phase': current_phase,
            'task_id': None,
            'action': 'continue_workflow'
        }
    }


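# Expected on-disk layout, inferred from the readers and writers in this file
# (sketch; only the paths referenced here are shown):
#
#   .workflow/
#     current.yml                  # contains: active_version: <version-id>
#     history/                     # archived workflow state files
#     versions/
#       <version-id>/
#         session.yml              # current_phase, approvals, summary, ...
#         design/
#           design_document.yml    # read by validate_implementation()

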
def save_workflow(state: dict):
    """Save workflow state to the version's session.yml file."""
    # Get active version
    current_path = get_current_state_path()
    if not current_path.exists():
        print("Error: No current.yml found")
        return

    current = load_yaml(str(current_path))
    active_version = current.get('active_version')
    if not active_version:
        print("Error: No active version set")
        return

    # Get the version's session.yml path
    version_session_path = get_workflow_dir() / 'versions' / active_version / 'session.yml'
    if not version_session_path.exists():
        print(f"Error: Session file not found: {version_session_path}")
        return

    # Load existing session data
    session = load_yaml(str(version_session_path))

    # Create backup
    backup_path = version_session_path.with_suffix('.yml.bak')
    shutil.copy(version_session_path, backup_path)

    # Update session with state changes
    session['current_phase'] = state['current_phase']
    session['updated_at'] = datetime.now().isoformat()

    if state.get('completed_at'):
        session['completed_at'] = state['completed_at']
        session['status'] = 'completed'

    # Update approvals
    if 'gates' in state:
        if 'approvals' not in session:
            session['approvals'] = {}
        if state['gates'].get('design_approval', {}).get('status') == 'approved':
            session['approvals']['design'] = state['gates']['design_approval']
        if state['gates'].get('implementation_approval', {}).get('status') == 'approved':
            session['approvals']['implementation'] = state['gates']['implementation_approval']

    save_yaml(str(version_session_path), session)


def transition_phase(state: dict, new_phase: str, error: Optional[str] = None) -> bool:
    """Transition workflow to a new phase."""
    current = state['current_phase']

    if new_phase not in PHASES:
        print(f"Error: Invalid phase '{new_phase}'")
        return False

    if new_phase not in VALID_TRANSITIONS.get(current, []):
        print(f"Error: Cannot transition from '{current}' to '{new_phase}'")
        print(f"Valid transitions: {VALID_TRANSITIONS.get(current, [])}")
        return False

    state['current_phase'] = new_phase
    state['resume_point']['phase'] = new_phase

    if new_phase == 'FAILED' and error:
        state['last_error'] = error

    if new_phase == 'COMPLETED':
        state['completed_at'] = datetime.now().isoformat()

    # Set appropriate resume action
    resume_actions = {
        'INITIALIZING': 'start_workflow',
        'DESIGNING': 'continue_design',
        'AWAITING_DESIGN_APPROVAL': 'await_user_approval',
        'DESIGN_APPROVED': 'start_implementation',
        'DESIGN_REJECTED': 'revise_design',
        'IMPLEMENTING': 'continue_implementation',
        'INTEGRATING': 'integrate_with_project',
        'REVIEWING': 'continue_review',
        'SECURITY_REVIEW': 'run_security_audit',
        'AWAITING_IMPL_APPROVAL': 'await_user_approval',
        'IMPL_APPROVED': 'start_completion',
        'IMPL_REJECTED': 'fix_implementation',
        'COMPLETING': 'continue_completion',
        'COMPLETED': 'workflow_done',
        'PAUSED': 'resume_workflow',
        'FAILED': 'retry_or_abort'
    }
    state['resume_point']['action'] = resume_actions.get(new_phase, 'unknown')

    save_workflow(state)
    return True


def approve_gate(state: dict, gate: str, approver: str = 'user') -> bool:
    """Approve a gate."""
    if gate not in ['design_approval', 'implementation_approval']:
        print(f"Error: Invalid gate '{gate}'")
        return False

    state['gates'][gate]['status'] = 'approved'
    state['gates'][gate]['approved_at'] = datetime.now().isoformat()
    state['gates'][gate]['approved_by'] = approver

    # Transition to next phase
    if gate == 'design_approval':
        transition_phase(state, 'DESIGN_APPROVED')
    else:
        transition_phase(state, 'IMPL_APPROVED')

    return True


def reject_gate(state: dict, gate: str, reason: str) -> bool:
    """Reject a gate."""
    if gate not in ['design_approval', 'implementation_approval']:
        print(f"Error: Invalid gate '{gate}'")
        return False

    state['gates'][gate]['status'] = 'rejected'
    state['gates'][gate]['rejection_reason'] = reason
    state['gates'][gate]['revision_count'] += 1

    # Transition to rejection phase
    if gate == 'design_approval':
        transition_phase(state, 'DESIGN_REJECTED')
    else:
        transition_phase(state, 'IMPL_REJECTED')

    return True


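# Gate handling sketch (illustrative): approving or rejecting a gate also
# drives the corresponding phase transition via transition_phase().
#
#   state = load_current_workflow()
#   if state and state['current_phase'] == 'AWAITING_DESIGN_APPROVAL':
#       approve_gate(state, 'design_approval', approver='reviewer')
#       # or: reject_gate(state, 'design_approval', 'Missing error states')

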
def update_progress(state: dict, **kwargs):
    """Update progress counters."""
    for key, value in kwargs.items():
        if key in state['progress']:
            state['progress'][key] = value
    save_workflow(state)


def update_task_status(state: dict, task_id: str, new_status: str):
    """Update task status in workflow state."""
    # Remove from all status lists
    for status in state['tasks']:
        if task_id in state['tasks'][status]:
            state['tasks'][status].remove(task_id)

    # Add to new status list
    if new_status in state['tasks']:
        state['tasks'][new_status].append(task_id)

    # Update resume point if task is in progress
    if new_status == 'in_progress':
        state['resume_point']['task_id'] = task_id

    save_workflow(state)


def save_checkpoint(state: dict, description: str, data: Optional[dict] = None) -> dict:
    """Save a checkpoint for recovery during long operations.

    Args:
        state: Current workflow state
        description: Human-readable description of the checkpoint
        data: Optional additional data to store

    Returns:
        The checkpoint object that was created
    """
    checkpoint = {
        'id': f"checkpoint_{datetime.now().strftime('%Y%m%d_%H%M%S')}",
        'timestamp': datetime.now().isoformat(),
        'phase': state['current_phase'],
        'description': description,
        'resume_point': state['resume_point'].copy(),
        'progress': state['progress'].copy(),
        'data': data or {}
    }

    # Keep only the last 10 checkpoints to avoid bloat
    if 'checkpoints' not in state:
        state['checkpoints'] = []
    state['checkpoints'].append(checkpoint)
    if len(state['checkpoints']) > 10:
        state['checkpoints'] = state['checkpoints'][-10:]

    save_workflow(state)
    return checkpoint


def get_latest_checkpoint(state: dict) -> Optional[dict]:
    """Get the most recent checkpoint.

    Returns:
        Latest checkpoint, or None if no checkpoints exist
    """
    checkpoints = state.get('checkpoints', [])
    return checkpoints[-1] if checkpoints else None


def restore_from_checkpoint(state: dict, checkpoint_id: Optional[str] = None) -> bool:
    """Restore workflow state from a checkpoint.

    Args:
        state: Current workflow state
        checkpoint_id: Optional specific checkpoint ID; defaults to the latest

    Returns:
        True if restoration was successful
    """
    checkpoints = state.get('checkpoints', [])
    if not checkpoints:
        print("Error: No checkpoints available")
        return False

    # Find checkpoint
    if checkpoint_id:
        checkpoint = next((c for c in checkpoints if c['id'] == checkpoint_id), None)
        if not checkpoint:
            print(f"Error: Checkpoint '{checkpoint_id}' not found")
            return False
    else:
        checkpoint = checkpoints[-1]

    # Restore state from checkpoint
    state['resume_point'] = checkpoint['resume_point'].copy()
    state['progress'] = checkpoint['progress'].copy()
    state['current_phase'] = checkpoint['phase']
    state['last_error'] = None  # Clear any error since we're recovering

    save_workflow(state)
    print(f"Restored from checkpoint: {checkpoint['description']}")
    return True


def list_checkpoints(state: dict) -> list:
    """List all available checkpoints.

    Returns:
        List of checkpoint summaries
    """
    return [
        {
            'id': c['id'],
            'timestamp': c['timestamp'],
            'phase': c['phase'],
            'description': c['description']
        }
        for c in state.get('checkpoints', [])
    ]


def clear_checkpoints(state: dict):
    """Clear all checkpoints (typically after successful completion)."""
    state['checkpoints'] = []
    save_workflow(state)


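# Checkpoint usage sketch (illustrative; the 'step' key is an arbitrary example):
# save a recovery point before a long operation, then roll back if it fails.
#
#   state = load_current_workflow()
#   if state:
#       save_checkpoint(state, "Before regenerating API routes",
#                       data={'step': 'api_routes'})
#       # ... long-running work ...
#       # On failure:
#       restore_from_checkpoint(state)  # latest checkpoint
#       # or: restore_from_checkpoint(state, checkpoint_id="checkpoint_...")

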
def archive_workflow(state: dict, suffix: str = ''):
    """Archive completed/aborted workflow."""
    history_dir = get_history_dir()
    history_dir.mkdir(exist_ok=True)

    filename = f"{state['id']}{suffix}.yml"
    archive_path = history_dir / filename

    save_yaml(str(archive_path), state)

    # Remove current state
    current_path = get_current_state_path()
    if current_path.exists():
        current_path.unlink()


# ============================================================================
# Implementation Validation
# ============================================================================

def validate_implementation(show_checklist: bool = False) -> dict:
    """Validate implementation against design document.

    Checks:
    - Components import from @/types/component-props
    - Components use object props (not flat)
    - All events from design are implemented
    - API routes exist and match design
    - Prisma models match design fields

    Returns:
        Validation result dict with errors, warnings, and passed checks
    """
    import glob
    import re

    result = {
        'passed': [],
        'warnings': [],
        'errors': [],
        'stats': {
            'components_checked': 0,
            'apis_checked': 0,
            'models_checked': 0
        }
    }

    # Get active version
    current_path = get_current_state_path()
    if not current_path.exists():
        result['errors'].append("No active workflow found")
        return result

    current = load_yaml(str(current_path))
    active_version = current.get('active_version')
    if not active_version:
        result['errors'].append("No active version set")
        return result

    # Load design document
    design_path = get_workflow_dir() / 'versions' / active_version / 'design' / 'design_document.yml'
    if not design_path.exists():
        result['errors'].append(f"Design document not found: {design_path}")
        return result

    design = load_yaml(str(design_path))

    # Check components
    components = design.get('components', [])
    for comp in components:
        comp_id = comp.get('id', '')
        comp_name = comp.get('name', '')
        props = comp.get('props', [])
        events = comp.get('events', [])

        result['stats']['components_checked'] += 1

        # Find component file
        comp_files = glob.glob(f"app/components/**/{comp_name}.tsx", recursive=True)
        comp_files += glob.glob(f"app/components/**/{comp_name}.ts", recursive=True)
        comp_files += glob.glob(f"components/**/{comp_name}.tsx", recursive=True)
        comp_files += glob.glob(f"src/components/**/{comp_name}.tsx", recursive=True)

        if not comp_files:
            result['errors'].append(f"Component file not found: {comp_name}")
            continue

        comp_file = comp_files[0]

        try:
            with open(comp_file, 'r') as f:
                content = f.read()

            # Check 1: Props imported from @/types/component-props
            if "@/types/component-props" in content or "component-props" in content:
                result['passed'].append(f"✅ {comp_name}: Props imported from @/types/component-props")
            else:
                # Check if using inline interface (bad)
                if f"interface {comp_name}Props" in content:
                    result['errors'].append(
                        f"❌ {comp_name}: Defines own props interface - should import from @/types/component-props"
                    )
                else:
                    result['warnings'].append(
                        f"⚠️ {comp_name}: Could not verify props import from @/types/component-props"
                    )

            # Check 2: Object props used (not flat)
            # Look for destructuring patterns that suggest flat props
            # Bad:  function Comp({ id, title, name }) - flat primitive props
            # Good: function Comp({ song, album }) - object props
            for prop in props:
                prop_name = prop.get('name', '')
                prop_type = prop.get('type', '')

                # If prop type is a model type (PascalCase), ensure it's used as object
                if prop_type and prop_type[0].isupper() and prop_type != 'string' and prop_type != 'boolean':
                    # Check if the prop is accessed like an object (prop.field)
                    if f"{prop_name}." in content:
                        result['passed'].append(f"✅ {comp_name}: Uses object prop '{prop_name}' correctly")
                    else:
                        result['warnings'].append(
                            f"⚠️ {comp_name}: Object prop '{prop_name}' may not be used as object"
                        )

            # Check 3: All events implemented
            for event in events:
                event_name = event.get('name', '')
                if event_name:
                    # Check if event handler is present in props destructuring or used
                    if event_name in content:
                        result['passed'].append(f"✅ {comp_name}: Event '{event_name}' implemented")
                    else:
                        result['errors'].append(
                            f"❌ {comp_name}: Event '{event_name}' not implemented (required by design)"
                        )

        except Exception as e:
            result['errors'].append(f"Error reading {comp_file}: {str(e)}")

    # Check API routes
    endpoints = design.get('api_endpoints', [])
    for endpoint in endpoints:
        api_id = endpoint.get('id', '')
        method = endpoint.get('method', 'GET')
        path = endpoint.get('path', '')

        result['stats']['apis_checked'] += 1

        # Convert API path to file path
        # /api/users     -> app/api/users/route.ts
        # /api/users/:id -> app/api/users/[id]/route.ts
        route_path = path.replace('/api/', '', 1)
        if ':' in route_path:
            # Handle path params like :id -> [id]
            parts = route_path.split('/')
            parts = [f"[{p[1:]}]" if p.startswith(':') else p for p in parts]
            route_path = '/'.join(parts)

        route_file = f"app/api/{route_path}/route.ts"
        route_file_alt = f"app/api/{route_path}/route.tsx"
        pages_route = f"pages/api/{route_path}.ts"

        found_route = None
        for rf in [route_file, route_file_alt, pages_route]:
            if os.path.exists(rf):
                found_route = rf
                break

        if found_route:
            try:
                with open(found_route, 'r') as f:
                    content = f.read()

                # Check HTTP method is exported
                method_patterns = {
                    'GET': r'export\s+(async\s+)?function\s+GET',
                    'POST': r'export\s+(async\s+)?function\s+POST',
                    'PUT': r'export\s+(async\s+)?function\s+PUT',
                    'PATCH': r'export\s+(async\s+)?function\s+PATCH',
                    'DELETE': r'export\s+(async\s+)?function\s+DELETE',
                }

                pattern = method_patterns.get(method)
                if pattern and re.search(pattern, content):
                    result['passed'].append(f"✅ {api_id}: {method} handler found at {found_route}")
                else:
                    result['errors'].append(
                        f"❌ {api_id}: {method} handler not found in {found_route}"
                    )

                # Check types imported from @/types/api-types
                if "@/types/api-types" in content or "api-types" in content:
                    result['passed'].append(f"✅ {api_id}: API types imported")
                else:
                    result['warnings'].append(
                        f"⚠️ {api_id}: API types not imported from @/types/api-types"
                    )

            except Exception as e:
                result['errors'].append(f"Error reading {found_route}: {str(e)}")
        else:
            result['errors'].append(f"❌ {api_id}: Route file not found for {method} {path}")

    # Check Prisma models
    models = design.get('data_models', [])
    prisma_schema = Path('prisma/schema.prisma')

    if prisma_schema.exists():
        try:
            with open(prisma_schema, 'r') as f:
                prisma_content = f.read()

            for model in models:
                model_id = model.get('id', '')
                model_name = model.get('name', '')
                fields = model.get('fields', [])

                result['stats']['models_checked'] += 1

                # Check model exists
                if f"model {model_name}" in prisma_content:
                    result['passed'].append(f"✅ {model_id}: Model {model_name} exists in Prisma schema")

                    # Check fields exist
                    for field in fields:
                        field_name = field.get('name', '')
                        if field_name and field_name in prisma_content:
                            result['passed'].append(f"✅ {model_id}: Field '{field_name}' present")
                        else:
                            result['errors'].append(
                                f"❌ {model_id}: Field '{field_name}' missing from Prisma schema"
                            )
                else:
                    result['errors'].append(f"❌ {model_id}: Model {model_name} not found in Prisma schema")

        except Exception as e:
            result['errors'].append(f"Error reading Prisma schema: {str(e)}")
    else:
        result['warnings'].append("⚠️ prisma/schema.prisma not found - skipping model validation")

    return result


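# Shape of design_document.yml assumed by validate_implementation() above
# (sketch reconstructed from the .get() calls; names are placeholders and only
# the keys read by this function are shown):
#
#   components:
#     - id: CMP-1
#       name: SongCard
#       props:
#         - name: song
#           type: Song
#       events:
#         - name: onPlay
#   api_endpoints:
#     - id: API-1
#       method: GET
#       path: /api/songs/:id
#   data_models:
#     - id: MDL-1
#       name: Song
#       fields:
#         - name: title

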
def show_validation_checklist(result: dict):
    """Display validation checklist."""
    print()
    print("╔" + "═" * 58 + "╗")
    print("║" + "IMPLEMENTATION VALIDATION CHECKLIST".center(58) + "║")
    print("╠" + "═" * 58 + "╣")

    # Stats
    stats = result.get('stats', {})
    print("║" + f" Components checked: {stats.get('components_checked', 0)}".ljust(58) + "║")
    print("║" + f" APIs checked: {stats.get('apis_checked', 0)}".ljust(58) + "║")
    print("║" + f" Models checked: {stats.get('models_checked', 0)}".ljust(58) + "║")

    # Passed
    passed = result.get('passed', [])
    if passed:
        print("╠" + "═" * 58 + "╣")
        print("║" + f" PASSED ({len(passed)})".ljust(58) + "║")
        print("╠" + "─" * 58 + "╣")
        for p in passed[:10]:  # Limit output
            print("║" + f" {p[:54]}".ljust(58) + "║")
        if len(passed) > 10:
            print("║" + f" ... and {len(passed) - 10} more".ljust(58) + "║")

    # Warnings
    warnings = result.get('warnings', [])
    if warnings:
        print("╠" + "═" * 58 + "╣")
        print("║" + f" WARNINGS ({len(warnings)})".ljust(58) + "║")
        print("╠" + "─" * 58 + "╣")
        for w in warnings[:5]:
            print("║" + f" {w[:54]}".ljust(58) + "║")
        if len(warnings) > 5:
            print("║" + f" ... and {len(warnings) - 5} more".ljust(58) + "║")

    # Errors
    errors = result.get('errors', [])
    if errors:
        print("╠" + "═" * 58 + "╣")
        print("║" + f" ERRORS ({len(errors)})".ljust(58) + "║")
        print("╠" + "─" * 58 + "╣")
        for e in errors:
            print("║" + f" {e[:54]}".ljust(58) + "║")

    # Summary
    print("╠" + "═" * 58 + "╣")
    total_passed = len(passed)
    total_warnings = len(warnings)
    total_errors = len(errors)

    if total_errors == 0:
        print("║" + " ✅ VALIDATION PASSED".ljust(58) + "║")
    else:
        print("║" + " ❌ VALIDATION FAILED".ljust(58) + "║")

    print("║" + f" Passed: {total_passed} | Warnings: {total_warnings} | Errors: {total_errors}".ljust(58) + "║")
    print("╚" + "═" * 58 + "╝")


def show_status(state: dict):
    """Display workflow status."""
    print()
    print("╔" + "═" * 58 + "╗")
    print("║" + "WORKFLOW STATUS".center(58) + "║")
    print("╠" + "═" * 58 + "╣")
    print("║" + f" ID: {state['id']}".ljust(58) + "║")
    print("║" + f" Feature: {state['feature'][:45]}".ljust(58) + "║")
    print("║" + f" Phase: {state['current_phase']}".ljust(58) + "║")
    print("╠" + "═" * 58 + "╣")
    print("║" + " APPROVAL GATES".ljust(58) + "║")

    design_gate = state['gates']['design_approval']
    impl_gate = state['gates']['implementation_approval']

    design_icon = "✅" if design_gate['status'] == 'approved' else "❌" if design_gate['status'] == 'rejected' else "⏳"
    impl_icon = "✅" if impl_gate['status'] == 'approved' else "❌" if impl_gate['status'] == 'rejected' else "⏳"

    print("║" + f" {design_icon} Design: {design_gate['status']}".ljust(58) + "║")
    print("║" + f" {impl_icon} Implementation: {impl_gate['status']}".ljust(58) + "║")
    print("╠" + "═" * 58 + "╣")
    print("║" + " PROGRESS".ljust(58) + "║")

    p = state['progress']
    print("║" + f" Entities Designed: {p['entities_designed']}".ljust(58) + "║")
    print("║" + f" Tasks Created: {p['tasks_created']}".ljust(58) + "║")
    print("║" + f" Tasks Implemented: {p['tasks_implemented']}".ljust(58) + "║")
    print("║" + f" Tasks Reviewed: {p['tasks_reviewed']}".ljust(58) + "║")
    print("║" + f" Tasks Completed: {p['tasks_completed']}".ljust(58) + "║")
    print("╠" + "═" * 58 + "╣")
    print("║" + " TASK BREAKDOWN".ljust(58) + "║")

    t = state['tasks']
    print("║" + f" ⏳ Pending: {len(t['pending'])}".ljust(58) + "║")
    print("║" + f" 🔄 In Progress: {len(t['in_progress'])}".ljust(58) + "║")
    print("║" + f" 🔍 Review: {len(t['review'])}".ljust(58) + "║")
    print("║" + f" ✅ Approved: {len(t['approved'])}".ljust(58) + "║")
    print("║" + f" ✓ Completed: {len(t['completed'])}".ljust(58) + "║")
    print("║" + f" 🚫 Blocked: {len(t['blocked'])}".ljust(58) + "║")

    if state['last_error']:
        print("╠" + "═" * 58 + "╣")
        print("║" + " ⚠️ LAST ERROR".ljust(58) + "║")
        print("║" + f" {state['last_error'][:52]}".ljust(58) + "║")

    print("╠" + "═" * 58 + "╣")
    print("║" + " TIMESTAMPS".ljust(58) + "║")
    print("║" + f" Started: {state['started_at'][:19]}".ljust(58) + "║")
    print("║" + f" Updated: {state['updated_at'][:19]}".ljust(58) + "║")
    if state['completed_at']:
        print("║" + f" Completed: {state['completed_at'][:19]}".ljust(58) + "║")
    print("╚" + "═" * 58 + "╝")


# ============================================================================
# CLI Interface
# ============================================================================

def main():
    parser = argparse.ArgumentParser(description="Workflow state management")
    subparsers = parser.add_subparsers(dest='command', help='Commands')

    # create command
    create_parser = subparsers.add_parser('create', help='Create new workflow')
    create_parser.add_argument('feature', help='Feature to implement')

    # status command
    subparsers.add_parser('status', help='Show workflow status')

    # transition command
    trans_parser = subparsers.add_parser('transition', help='Transition to new phase')
    trans_parser.add_argument('phase', choices=PHASES, help='Target phase')
    trans_parser.add_argument('--error', help='Error message (for FAILED phase)')

    # approve command
    approve_parser = subparsers.add_parser('approve', help='Approve a gate')
    approve_parser.add_argument('gate', choices=['design', 'implementation'], help='Gate to approve')
    approve_parser.add_argument('--approver', default='user', help='Approver name')

    # reject command
    reject_parser = subparsers.add_parser('reject', help='Reject a gate')
    reject_parser.add_argument('gate', choices=['design', 'implementation'], help='Gate to reject')
    reject_parser.add_argument('reason', help='Rejection reason')

    # progress command
    progress_parser = subparsers.add_parser('progress', help='Update progress')
    progress_parser.add_argument('--entities', type=int, help='Entities designed')
    progress_parser.add_argument('--tasks-created', type=int, help='Tasks created')
    progress_parser.add_argument('--tasks-impl', type=int, help='Tasks implemented')
    progress_parser.add_argument('--tasks-reviewed', type=int, help='Tasks reviewed')
    progress_parser.add_argument('--tasks-completed', type=int, help='Tasks completed')

    # task command
    task_parser = subparsers.add_parser('task', help='Update task status')
    task_parser.add_argument('task_id', help='Task ID')
    task_parser.add_argument('status', choices=['pending', 'in_progress', 'review', 'approved', 'completed', 'blocked'])

    # archive command
    archive_parser = subparsers.add_parser('archive', help='Archive workflow')
    archive_parser.add_argument('--suffix', default='', help='Filename suffix (e.g., _aborted)')

    # exists command
    subparsers.add_parser('exists', help='Check if workflow exists')

    # checkpoint command
    checkpoint_parser = subparsers.add_parser('checkpoint', help='Manage checkpoints')
    checkpoint_parser.add_argument('action', choices=['save', 'list', 'restore', 'clear'],
                                   help='Checkpoint action')
    checkpoint_parser.add_argument('--description', '-d', help='Checkpoint description (for save)')
    checkpoint_parser.add_argument('--id', help='Checkpoint ID (for restore)')
    checkpoint_parser.add_argument('--data', help='JSON data to store (for save)')

    # validate command
    validate_parser = subparsers.add_parser('validate', help='Validate implementation against design')
    validate_parser.add_argument('--checklist', action='store_true',
                                 help='Show detailed validation checklist')
    validate_parser.add_argument('--json', action='store_true',
                                 help='Output result as JSON')

    # checklist command (alias for validate --checklist)
    checklist_parser = subparsers.add_parser('checklist', help='Show implementation checklist')
    checklist_parser.add_argument('action', choices=['show'], nargs='?', default='show',
                                  help='Checklist action')

    args = parser.parse_args()

    if args.command == 'create':
        state = create_workflow(args.feature)
        print(f"Created workflow: {state['id']}")
        print(f"Feature: {args.feature}")
        print(f"State saved to: {get_current_state_path()}")

    elif args.command == 'status':
        state = load_current_workflow()
        if state:
            show_status(state)
        else:
            print("No active workflow found.")
            print("Start a new workflow with: /workflow:spawn <feature>")

    elif args.command == 'transition':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        if transition_phase(state, args.phase, args.error):
            print(f"Transitioned to: {args.phase}")
        else:
            sys.exit(1)

    elif args.command == 'approve':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        gate = f"{args.gate}_approval"
        if approve_gate(state, gate, args.approver):
            print(f"Approved: {args.gate}")

    elif args.command == 'reject':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        gate = f"{args.gate}_approval"
        if reject_gate(state, gate, args.reason):
            print(f"Rejected: {args.gate}")
            print(f"Reason: {args.reason}")

    elif args.command == 'progress':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        updates = {}
        if args.entities is not None:
            updates['entities_designed'] = args.entities
        if args.tasks_created is not None:
            updates['tasks_created'] = args.tasks_created
        if args.tasks_impl is not None:
            updates['tasks_implemented'] = args.tasks_impl
        if args.tasks_reviewed is not None:
            updates['tasks_reviewed'] = args.tasks_reviewed
        if args.tasks_completed is not None:
            updates['tasks_completed'] = args.tasks_completed
        if updates:
            update_progress(state, **updates)
            print("Progress updated")

    elif args.command == 'task':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        update_task_status(state, args.task_id, args.status)
        print(f"Task {args.task_id} → {args.status}")

    elif args.command == 'archive':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)
        archive_workflow(state, args.suffix)
        print(f"Workflow archived to: {get_history_dir()}/{state['id']}{args.suffix}.yml")

    elif args.command == 'exists':
        state = load_current_workflow()
        if state:
            print("true")
            sys.exit(0)
        else:
            print("false")
            sys.exit(1)

    elif args.command == 'checkpoint':
        state = load_current_workflow()
        if not state:
            print("Error: No active workflow")
            sys.exit(1)

        if args.action == 'save':
            if not args.description:
                print("Error: --description required for save")
                sys.exit(1)
            data = None
            if args.data:
                try:
                    data = json.loads(args.data)
                except json.JSONDecodeError:
                    print("Error: --data must be valid JSON")
                    sys.exit(1)
            checkpoint = save_checkpoint(state, args.description, data)
            print(f"Checkpoint saved: {checkpoint['id']}")
            print(f"Description: {args.description}")

        elif args.action == 'list':
            checkpoints = list_checkpoints(state)
            if not checkpoints:
                print("No checkpoints available")
            else:
                print("\n" + "=" * 60)
                print("CHECKPOINTS".center(60))
                print("=" * 60)
                for cp in checkpoints:
                    print(f"\n ID: {cp['id']}")
                    print(f" Time: {cp['timestamp'][:19]}")
                    print(f" Phase: {cp['phase']}")
                    print(f" Description: {cp['description']}")
                print("\n" + "=" * 60)

        elif args.action == 'restore':
            if restore_from_checkpoint(state, args.id):
                print("Workflow state restored successfully")
            else:
                sys.exit(1)

        elif args.action == 'clear':
            clear_checkpoints(state)
            print("All checkpoints cleared")

    elif args.command == 'validate':
        result = validate_implementation(args.checklist)
        if args.json:
            print(json.dumps(result, indent=2))
            sys.exit(0 if not result['errors'] else 1)
        show_validation_checklist(result)
        sys.exit(0 if not result['errors'] else 1)

    elif args.command == 'checklist':
        result = validate_implementation(show_checklist=True)
        show_validation_checklist(result)
        sys.exit(0 if not result['errors'] else 1)

    else:
        parser.print_help()


if __name__ == "__main__":
    main()
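

# Example invocations (illustrative; assumes the script is saved as
# workflow_state.py — adjust the name/path to match this repository):
#
#   python3 workflow_state.py create "Add playlist sharing"
#   python3 workflow_state.py status
#   python3 workflow_state.py transition DESIGNING
#   python3 workflow_state.py approve design --approver reviewer
#   python3 workflow_state.py checkpoint save -d "Before schema migration"
#   python3 workflow_state.py validate --checklist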