#!/usr/bin/env python3
"""
Tailwind Site Rebuilder
-----------------------
Analyzes websites and creates a blueprint for rebuilding with Tailwind CSS.
"""

import os
import sys
import argparse
import re
from urllib.parse import urlparse

# Ensure we can import from our lib directory
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib"))

from analyzer import create_simulation

def get_domain_from_url(url):
    """Extract a clean domain name from a URL"""
    # Add scheme if missing
    if not url.startswith(('http://', 'https://')):
        url = 'https://' + url
        
    # Parse the URL to get the domain
    parsed_url = urlparse(url)
    domain = parsed_url.netloc
    
    # Remove www. prefix if present
    if domain.startswith('www.'):
        domain = domain[4:]
        
    # Clean domain name to be a valid directory name
    # Replace dots and other invalid characters with underscores
    clean_domain = re.sub(r'[^\w\-]', '_', domain)
    
    return clean_domain

def main():
    """Parse arguments and run the site rebuilder"""
    print("""
╔════════════════════════════════════════════════════════╗
║ Tailwind Site Rebuilder                                ║
║ Analyze websites for Tailwind CSS rebuilding           ║
╚════════════════════════════════════════════════════════╝
    """)
    
    parser = argparse.ArgumentParser(description="Analyze websites for Tailwind CSS rebuilding")
    parser.add_argument("url", help="URL of the website to analyze")
    parser.add_argument("--output", "-o", help="Output directory (defaults to domain name)")
    
    args = parser.parse_args()
    
    # Use domain name as default output directory
    domain = get_domain_from_url(args.url)
    output_dir = args.output if args.output else os.path.join("sites", domain)
    
    # Ensure output directory exists
    os.makedirs(os.path.join(output_dir, "screenshots"), exist_ok=True)
    
    print(f"Analyzing {args.url}")
    print(f"Output will be saved to {output_dir}")
    create_simulation(args.url, output_dir)
    
    # Paths to output directories
    output_abs_path = os.path.abspath(output_dir)
    content_path = os.path.join(output_abs_path, "content")
    images_path = os.path.join(content_path, "images")
    text_path = os.path.join(content_path, "text")
    
    print(f"\nAnalysis complete! Results organized in directories:")
    print(f"- Main output: {output_abs_path}")
    print(f"- Screenshots: {os.path.join(output_abs_path, 'screenshots')}")
    print(f"- JSON context: {os.path.join(output_abs_path, 'rebuild_context.json')}")
    print(f"- Content directory: {content_path}")
    print(f"  ├─ Images: {images_path}")
    print(f"  └─ Text: {text_path}")

if __name__ == "__main__":
    main()