#!/usr/bin/env python3
"""
Implementation Visualizer for the guardrail workflow.

Generates a visual representation of implemented pages and components:
- Component tree structure
- Props and interfaces
- Page layouts
- API endpoints
- File statistics

Usage:
    python3 visualize_implementation.py --manifest project_manifest.json
    python3 visualize_implementation.py --tasks-dir .workflow/versions/v001/tasks
"""

import argparse
import json
import os
import re
import sys
from pathlib import Path
from dataclasses import dataclass, field
from typing import Optional

# Try to import yaml
try:
    import yaml
    HAS_YAML = True
except ImportError:
    HAS_YAML = False


@dataclass
class ComponentInfo:
    """Parsed component information."""
    name: str
    file_path: str
    props: list[str] = field(default_factory=list)
    imports: list[str] = field(default_factory=list)
    exports: list[str] = field(default_factory=list)
    hooks: list[str] = field(default_factory=list)
    children: list[str] = field(default_factory=list)
    lines: int = 0
    has_types: bool = False
    status: str = "IMPLEMENTED"


@dataclass
class PageInfo:
    """Parsed page information."""
    name: str
    file_path: str
    route: str
    components: list[str] = field(default_factory=list)
    api_calls: list[str] = field(default_factory=list)
    lines: int = 0
    is_client: bool = False
    is_server: bool = True


@dataclass
class APIEndpointInfo:
    """Parsed API endpoint information."""
    name: str
    file_path: str
    route: str
    methods: list[str] = field(default_factory=list)
    has_auth: bool = False
    has_validation: bool = False
    lines: int = 0


def load_yaml(filepath: str) -> dict:
    """Load YAML file."""
    if not os.path.exists(filepath):
        return {}
    with open(filepath, 'r') as f:
        content = f.read()
    if HAS_YAML:
        return yaml.safe_load(content) or {}
    # Basic fallback
    result = {}
    for line in content.split('\n'):
        if ':' in line and not line.startswith(' '):
            key, _, value = line.partition(':')
            result[key.strip()] = value.strip()
    return result
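
# Note: the basic fallback above only captures top-level scalar "key: value"
# pairs. Nested mappings and list values (for example a task's file_paths
# list) are only parsed correctly when PyYAML is installed.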


def load_manifest(manifest_path: str) -> dict:
    """Load project manifest."""
    if not os.path.exists(manifest_path):
        return {}
    try:
        with open(manifest_path) as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError):
        return {}
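
# Illustrative manifest shape, assuming only the fields this script reads
# (entity names and paths below are hypothetical):
#
#   {
#     "project": {"name": "my-app"},
#     "entities": {
#       "pages":         [{"name": "Dashboard", "file_path": "src/app/dashboard/page.tsx"}],
#       "components":    [{"name": "StatCard", "file_path": "src/components/StatCard.tsx",
#                          "status": "IMPLEMENTED"}],
#       "api_endpoints": [{"name": "users", "file_path": "src/app/api/users/route.ts"}]
#     }
#   }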


def parse_typescript_file(file_path: str) -> dict:
    """Parse TypeScript/TSX file for component information."""
    if not os.path.exists(file_path):
        return {'exists': False}
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
        lines = content.split('\n')
    except (IOError, UnicodeDecodeError):
        return {'exists': False}
    result = {
        'exists': True,
        'lines': len(lines),
        'imports': [],
        'exports': [],
        'props': [],
        'hooks': [],
        'components_used': [],
        'api_calls': [],
        'is_client': "'use client'" in content or '"use client"' in content,
        'has_types': 'interface ' in content or 'type ' in content,
        'methods': [],
    }
    # Extract imports
    import_pattern = r"import\s+(?:{[^}]+}|\w+)\s+from\s+['\"]([^'\"]+)['\"]"
    for match in re.finditer(import_pattern, content):
        result['imports'].append(match.group(1))
    # Extract exports
    export_patterns = [
        r"export\s+(?:default\s+)?(?:function|const|class)\s+(\w+)",
        r"export\s+{\s*([^}]+)\s*}",
    ]
    for pattern in export_patterns:
        for match in re.finditer(pattern, content):
            exports = match.group(1).split(',')
            result['exports'].extend([e.strip() for e in exports if e.strip()])
    # Extract props interface
    props_pattern = r"(?:interface|type)\s+(\w*Props\w*)\s*(?:=|{)"
    for match in re.finditer(props_pattern, content):
        result['props'].append(match.group(1))
    # Extract React hooks
    hooks_pattern = r"\b(use[A-Z]\w+)\s*\("
    for match in re.finditer(hooks_pattern, content):
        hook = match.group(1)
        if hook not in result['hooks']:
            result['hooks'].append(hook)
    # Extract component usage (JSX)
    component_pattern = r"<([A-Z]\w+)(?:\s|/|>)"
    for match in re.finditer(component_pattern, content):
        comp = match.group(1)
        if comp not in result['components_used'] and comp not in ['React', 'Fragment']:
            result['components_used'].append(comp)
    # Extract API calls
    api_patterns = [
        r"fetch\s*\(\s*['\"`](/api/[^'\"`]+)['\"`]",
        r"axios\.\w+\s*\(\s*['\"`](/api/[^'\"`]+)['\"`]",
    ]
    for pattern in api_patterns:
        for match in re.finditer(pattern, content):
            result['api_calls'].append(match.group(1))
    # Extract HTTP methods (for API routes)
    method_pattern = r"export\s+(?:async\s+)?function\s+(GET|POST|PUT|DELETE|PATCH)"
    for match in re.finditer(method_pattern, content):
        result['methods'].append(match.group(1))
    return result
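
# Rough sketch of what parse_typescript_file returns for a small client
# component (file contents, names and paths are hypothetical):
#
#   // src/components/UserCard.tsx
#   //   'use client'
#   //   import { useState } from 'react'
#   //   import { Avatar } from './Avatar'
#   //   interface UserCardProps { name: string }
#   //   export function UserCard({ name }: UserCardProps) {
#   //     const [open, setOpen] = useState(false)
#   //     return <Avatar />
#   //   }
#
#   -> {'exists': True, 'lines': 8, 'is_client': True, 'has_types': True,
#       'imports': ['react', './Avatar'], 'exports': ['UserCard'],
#       'props': ['UserCardProps'], 'hooks': ['useState'],
#       'components_used': ['Avatar'], 'api_calls': [], 'methods': []}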


def get_route_from_path(file_path: str) -> str:
    """Convert file path to route."""
    # Handle Next.js App Router
    if '/app/' in file_path:
        route = file_path.split('/app/')[-1]
        # (^|/) so that a root-level page.tsx / route.ts also collapses to "/"
        route = re.sub(r'(^|/)page\.(tsx?|jsx?)$', '', route)
        route = re.sub(r'(^|/)route\.(tsx?|jsx?)$', '', route)
        route = '/' + route if route else '/'
        # Handle dynamic routes
        route = re.sub(r'\[(\w+)\]', r':\1', route)
        return route
    # Handle Pages Router
    if '/pages/' in file_path:
        route = file_path.split('/pages/')[-1]
        route = re.sub(r'\.(tsx?|jsx?)$', '', route)
        route = re.sub(r'(^|/)index$', '', route)
        route = '/' + route if route else '/'
        route = re.sub(r'\[(\w+)\]', r':\1', route)
        return route
    return file_path
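
# Illustrative mappings (paths are hypothetical):
#   src/app/dashboard/page.tsx    -> /dashboard
#   src/app/users/[id]/page.tsx   -> /users/:id
#   src/app/api/users/route.ts    -> /api/users
#   src/pages/blog/[slug].tsx     -> /blog/:slug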


def visualize_component(info: ComponentInfo, indent: str = "") -> list[str]:
    """Generate ASCII visualization for a component."""
    lines = []
    status_icon = {
        'IMPLEMENTED': '🟢',
        'PENDING': '⏳',
        'IN_PROGRESS': '🔄',
        'ERROR': '❌',
    }.get(info.status, '')
    # Component header
    lines.append(f"{indent}┌{'─' * 60}┐")
    lines.append(f"{indent}│ {status_icon} COMPONENT: {info.name:<46}│")
    lines.append(f"{indent}│ 📁 {info.file_path:<57}│")
    lines.append(f"{indent}│ 📏 {str(info.lines) + ' lines':<57}│")
    # Props
    if info.props:
        lines.append(f"{indent}├{'─' * 60}┤")
        lines.append(f"{indent}│ PROPS{' ' * 54}│")
        for prop in info.props[:3]:
            lines.append(f"{indent}│   • {prop:<55}│")
    # Hooks
    if info.hooks:
        lines.append(f"{indent}├{'─' * 60}┤")
        lines.append(f"{indent}│ HOOKS{' ' * 54}│")
        hooks_str = ', '.join(info.hooks[:5])
        if len(hooks_str) > 52:
            hooks_str = hooks_str[:49] + '...'
        lines.append(f"{indent}│ {hooks_str:<59}│")
    # Children components
    if info.children:
        lines.append(f"{indent}├{'─' * 60}┤")
        lines.append(f"{indent}│ USES COMPONENTS{' ' * 44}│")
        for child in info.children[:5]:
            lines.append(f"{indent}│   └── {child:<53}│")
    lines.append(f"{indent}└{'─' * 60}┘")
    return lines


def visualize_page(info: PageInfo, indent: str = "") -> list[str]:
    """Generate ASCII visualization for a page."""
    lines = []
    client_icon = "🖥️" if info.is_client else "🌐"
    # Page header
    lines.append(f"{indent}╔{'═' * 62}╗")
    lines.append(f"{indent}║ {client_icon} PAGE: {info.name:<52}  ║")
    lines.append(f"{indent}║ Route: {info.route:<54}║")
    lines.append(f"{indent}║ File:  {info.file_path:<54}║")
    lines.append(f"{indent}╠{'═' * 62}╣")
    # Components used
    if info.components:
        lines.append(f"{indent}║ COMPONENTS USED{' ' * 46}║")
        for comp in info.components[:6]:
            lines.append(f"{indent}║   ├── {comp:<55}║")
        if len(info.components) > 6:
            extra = f"... and {len(info.components) - 6} more"
            lines.append(f"{indent}║   └── {extra:<55}║")
    else:
        lines.append(f"{indent}║   (No child components detected){' ' * 29}║")
    # API calls
    if info.api_calls:
        lines.append(f"{indent}╠{'═' * 62}╣")
        lines.append(f"{indent}║ API CALLS{' ' * 52}║")
        for api in info.api_calls[:4]:
            api_short = api[:50] if len(api) <= 50 else api[:47] + '...'
            lines.append(f"{indent}║   🔌 {api_short:<57}║")
    lines.append(f"{indent}╚{'═' * 62}╝")
    return lines


def visualize_api_endpoint(info: APIEndpointInfo, indent: str = "") -> list[str]:
    """Generate ASCII visualization for an API endpoint."""
    lines = []
    method_colors = {
        'GET': '🟢',
        'POST': '🟡',
        'PUT': '🟠',
        'PATCH': '🟠',
        'DELETE': '🔴',
    }
    methods_str = ' '.join([f"{method_colors.get(m, '')}{m}" for m in info.methods])
    lines.append(f"{indent}┌{'─' * 60}┐")
    lines.append(f"{indent}│ 🔌 API: {info.route:<52}│")
    lines.append(f"{indent}│ Methods: {methods_str:<50}│")
    lines.append(f"{indent}│ File: {info.file_path:<53}│")
    features = []
    if info.has_auth:
        features.append("🔐 Auth")
    if info.has_validation:
        features.append("✓ Validation")
    if features:
        features_str = ' '.join(features)
        lines.append(f"{indent}│ Features: {features_str:<49}│")
    lines.append(f"{indent}└{'─' * 60}┘")
    return lines


def generate_implementation_tree(components: list[ComponentInfo]) -> list[str]:
    """Generate a tree view of component hierarchy."""
    lines = []
    lines.append("")
    lines.append("🌳 COMPONENT HIERARCHY")
    lines.append("═" * 65)
    if not components:
        lines.append("   (No components found)")
        return lines
    # Group by directory
    by_dir: dict[str, list[ComponentInfo]] = {}
    for comp in components:
        dir_path = str(Path(comp.file_path).parent)
        if dir_path not in by_dir:
            by_dir[dir_path] = []
        by_dir[dir_path].append(comp)
    for dir_path, comps in sorted(by_dir.items()):
        lines.append(f"  📂 {dir_path}/")
        for i, comp in enumerate(comps):
            is_last = i == len(comps) - 1
            prefix = "   └──" if is_last else "   ├──"
            status = "🟢" if comp.status == "IMPLEMENTED" else "⏳"
            lines.append(f"  {prefix} {status} {comp.name}")
            # Show props
            if comp.props:
                prop_prefix = "         " if is_last else "   │     "
                for prop in comp.props[:2]:
                    lines.append(f"  {prop_prefix} 📋 {prop}")
    return lines
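
# Roughly what the tree looks like for two hypothetical components in one
# directory (separator shortened here):
#
#   🌳 COMPONENT HIERARCHY
#   ═════════════════════════════════════════════
#     📂 src/components/
#        ├── 🟢 StatCard
#        │      📋 StatCardProps
#        └── 🟢 UserTable
#               📋 UserTableProps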


def generate_stats(
    pages: list[PageInfo],
    components: list[ComponentInfo],
    endpoints: list[APIEndpointInfo]
) -> list[str]:
    """Generate implementation statistics."""
    lines = []
    total_lines = sum(p.lines for p in pages) + sum(c.lines for c in components) + sum(e.lines for e in endpoints)
    client_pages = sum(1 for p in pages if p.is_client)
    server_pages = len(pages) - client_pages
    typed_components = sum(1 for c in components if c.has_types)
    lines.append("")
    lines.append("╔" + "═" * 68 + "╗")
    lines.append(f"║ 📊 IMPLEMENTATION STATS{' ' * 45}║")
    lines.append("╠" + "═" * 68 + "╣")
    lines.append(f"║ Pages: {len(pages):<5} │ Client: {client_pages:<3} │ Server: {server_pages:<3}{' ' * 27}║")
    lines.append(f"║ Components: {len(components):<5} │ Typed: {typed_components:<4}{' ' * 36}║")
    lines.append(f"║ API Endpoints: {len(endpoints):<5}{' ' * 47}║")
    lines.append(f"║ Total Lines: {total_lines:<5}{' ' * 49}║")
    lines.append("╠" + "═" * 68 + "╣")
    # Hooks usage
    all_hooks = []
    for comp in components:
        all_hooks.extend(comp.hooks)
    hook_counts = {}
    for hook in all_hooks:
        hook_counts[hook] = hook_counts.get(hook, 0) + 1
    if hook_counts:
        lines.append(f"║ HOOKS USAGE{' ' * 56}║")
        for hook, count in sorted(hook_counts.items(), key=lambda x: -x[1])[:5]:
            lines.append(f"║   {hook:<20} × {count:<3}{' ' * 45}"[:69] + "║")
    lines.append("╚" + "═" * 68 + "╝")
    return lines


def generate_page_flow(pages: list[PageInfo]) -> list[str]:
    """Generate page flow visualization."""
    lines = []
    if not pages:
        return lines
    lines.append("")
    lines.append("📱 PAGE STRUCTURE")
    lines.append("═" * 65)
    # Sort by route
    sorted_pages = sorted(pages, key=lambda p: p.route)
    for i, page in enumerate(sorted_pages):
        is_last = i == len(sorted_pages) - 1
        icon = "🖥️" if page.is_client else "🌐"
        # Page box
        lines.append(f"┌{'─' * 50}┐")
        lines.append(f"│ {icon} {page.route:<47}│")
        lines.append(f"│ {page.name:<49}│")
        # Components count
        comp_count = len(page.components)
        api_count = len(page.api_calls)
        lines.append(f"│ 🧩 {comp_count} components 🔌 {api_count} API calls{' ' * 25}"[:51] + "│")
        lines.append(f"└{'─' * 50}┘")
        if not is_last:
            lines.append("")
            lines.append("")
    return lines


def visualize_from_manifest(manifest_path: str) -> str:
    """Generate full visualization from manifest."""
    manifest = load_manifest(manifest_path)
    if not manifest:
        return "❌ Could not load manifest"
    entities = manifest.get('entities', {})
    project_name = manifest.get('project', {}).get('name', 'Unknown')
    lines = []
    # Header
    lines.append("")
    lines.append("╔" + "═" * 68 + "╗")
    lines.append(f"║ 🏗️ IMPLEMENTATION VISUALIZATION{' ' * 36}║")
    lines.append(f"║ Project: {project_name:<58}║")
    lines.append("╚" + "═" * 68 + "╝")
    pages: list[PageInfo] = []
    components: list[ComponentInfo] = []
    endpoints: list[APIEndpointInfo] = []
    # Parse pages
    for page_data in entities.get('pages', []):
        file_path = page_data.get('file_path', '')
        if file_path and os.path.exists(file_path):
            parsed = parse_typescript_file(file_path)
            page = PageInfo(
                name=page_data.get('name', 'Unknown'),
                file_path=file_path,
                route=get_route_from_path(file_path),
                components=parsed.get('components_used', []),
                api_calls=parsed.get('api_calls', []),
                lines=parsed.get('lines', 0),
                is_client=parsed.get('is_client', False),
            )
            pages.append(page)
    # Parse components
    for comp_data in entities.get('components', []):
        file_path = comp_data.get('file_path', '')
        if file_path and os.path.exists(file_path):
            parsed = parse_typescript_file(file_path)
            comp = ComponentInfo(
                name=comp_data.get('name', 'Unknown'),
                file_path=file_path,
                props=parsed.get('props', []),
                imports=parsed.get('imports', []),
                exports=parsed.get('exports', []),
                hooks=parsed.get('hooks', []),
                children=parsed.get('components_used', []),
                lines=parsed.get('lines', 0),
                has_types=parsed.get('has_types', False),
                status=comp_data.get('status', 'IMPLEMENTED'),
            )
            components.append(comp)
    # Parse API endpoints
    for api_data in entities.get('api_endpoints', []):
        file_path = api_data.get('file_path', '')
        if file_path and os.path.exists(file_path):
            parsed = parse_typescript_file(file_path)
            endpoint = APIEndpointInfo(
                name=api_data.get('name', 'Unknown'),
                file_path=file_path,
                route=get_route_from_path(file_path),
                # Default to GET when no exported handlers were detected
                methods=parsed.get('methods') or ['GET'],
                lines=parsed.get('lines', 0),
            )
            endpoints.append(endpoint)
    # Page flow
    lines.extend(generate_page_flow(pages))
    # Detailed pages
    if pages:
        lines.append("")
        lines.append("📄 PAGE DETAILS")
        lines.append("═" * 65)
        for page in pages:
            lines.extend(visualize_page(page))
            lines.append("")
    # Component hierarchy
    lines.extend(generate_implementation_tree(components))
    # Detailed components
    if components:
        lines.append("")
        lines.append("🧩 COMPONENT DETAILS")
        lines.append("═" * 65)
        for comp in components[:10]:  # Limit to 10
            lines.extend(visualize_component(comp))
            lines.append("")
        if len(components) > 10:
            lines.append(f" ... and {len(components) - 10} more components")
    # API endpoints
    if endpoints:
        lines.append("")
        lines.append("🔌 API ENDPOINTS")
        lines.append("═" * 65)
        for endpoint in endpoints:
            lines.extend(visualize_api_endpoint(endpoint))
            lines.append("")
    # Stats
    lines.extend(generate_stats(pages, components, endpoints))
    # Legend
    lines.append("")
    lines.append("📋 LEGEND")
    lines.append("═" * 65)
    lines.append("  🟢 Implemented  ⏳ Pending  🔄 In Progress  ❌ Error")
    lines.append("  🖥️ Client Component  🌐 Server Component")
    lines.append("  🟢 GET  🟡 POST  🟠 PUT/PATCH  🔴 DELETE")
    lines.append("")
    return "\n".join(lines)


def visualize_from_tasks(tasks_dir: str) -> str:
    """Generate visualization from task files."""
    tasks_path = Path(tasks_dir)
    if not tasks_path.exists():
        return f"❌ Tasks directory not found: {tasks_dir}"
    task_files = list(tasks_path.glob('task_*.yml'))
    if not task_files:
        return f"❌ No task files found in: {tasks_dir}"
    lines = []
    lines.append("")
    lines.append("╔" + "═" * 68 + "╗")
    lines.append(f"║ 📋 TASK IMPLEMENTATION STATUS{' ' * 39}║")
    lines.append("╚" + "═" * 68 + "╝")
    lines.append("")
    implemented_files = []
    for task_file in sorted(task_files):
        task = load_yaml(str(task_file))
        task_id = task.get('id', task_file.stem)
        status = task.get('status', 'unknown')
        title = task.get('title', 'Unknown task')
        file_paths = task.get('file_paths', [])
        status_icon = {
            'completed': '✅',
            'approved': '✅',
            'pending': '⏳',
            'in_progress': '🔄',
            'blocked': '🚫',
        }.get(status, '')
        lines.append(f"  {status_icon} {task_id}")
        lines.append(f"     {title[:55]}")
        for fp in file_paths:
            if os.path.exists(fp):
                lines.append(f"     └── ✓ {fp}")
                implemented_files.append(fp)
            else:
                lines.append(f"     └── ✗ {fp} (missing)")
        lines.append("")
    # Parse and visualize implemented files
    if implemented_files:
        lines.append("═" * 65)
        lines.append("")
        lines.append("🔍 IMPLEMENTED FILES ANALYSIS")
        lines.append("")
        for fp in implemented_files[:5]:
            parsed = parse_typescript_file(fp)
            if parsed.get('exists'):
                name = Path(fp).stem
                lines.append(f"  📁 {fp}")
                lines.append(f"     Lines: {parsed.get('lines', 0)}")
                if parsed.get('exports'):
                    lines.append(f"     Exports: {', '.join(parsed['exports'][:3])}")
                if parsed.get('hooks'):
                    lines.append(f"     Hooks: {', '.join(parsed['hooks'][:3])}")
                if parsed.get('components_used'):
                    lines.append(f"     Uses: {', '.join(parsed['components_used'][:3])}")
                lines.append("")
    return "\n".join(lines)


def main():
    parser = argparse.ArgumentParser(description="Visualize implementation")
    parser.add_argument('--manifest', help='Path to project_manifest.json')
    parser.add_argument('--tasks-dir', help='Path to tasks directory')
    parser.add_argument('--format', choices=['full', 'tree', 'stats', 'pages'],
                        default='full', help='Output format')
    args = parser.parse_args()
    # NOTE: --format is parsed, but only the full output is currently produced.
    if args.manifest:
        output = visualize_from_manifest(args.manifest)
    elif args.tasks_dir:
        output = visualize_from_tasks(args.tasks_dir)
    else:
        # Auto-detect
        if os.path.exists('project_manifest.json'):
            output = visualize_from_manifest('project_manifest.json')
        else:
            output = ("Usage: python3 visualize_implementation.py "
                      "--manifest project_manifest.json (or --tasks-dir <dir>)")
    print(output)
    return 0


if __name__ == "__main__":
    sys.exit(main())