"""
File I/O operations module.

This module handles file upload, download, and management operations
for both local filesystem and AWS S3 storage.
"""

import base64
import io
import logging
import os
import struct
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Optional, Tuple

import boto3
from botocore.exceptions import ClientError
from fastapi import File, UploadFile
from sqlalchemy.orm import Session

from src.apps.base.services import get_active_cdn
from src.apps.base.utils.functions import generate_secure_id
from src.core.config import settings
from src.core.exceptions import APIException
from src.core.utils.constants import MAX_UPLOAD_SIZE_MB
from src.core.utils.enums import FileSizeUnits

logger = logging.getLogger(__name__)


def create_uploads_directory() -> None:
    """
    Create the main uploads directory if it doesn't exist.
    
    Raises:
        Exception: If directory creation fails.
    """
    try:
        os.makedirs(settings.UPLOADS_DIR, exist_ok=True)
        logger.info(f"Uploads directory created or already exists at: {settings.UPLOADS_DIR}")
    except Exception as e:
        err_msg = (
            f"Failed to create uploads directory at {settings.UPLOADS_DIR} due to: {e}"
        )
        logger.error(err_msg)
        raise


def create_nested_upload_directory(path: str) -> None:
    """
    Create nested directory structure for file uploads.
    
    Args:
        path: The file path for which to create parent directories.
        
    Raises:
        Exception: If directory creation fails.
    """
    try:
        create_uploads_directory()
        nested_path = Path(path).parent
        nested_path.mkdir(parents=True, exist_ok=True)
        logger.info(f"Nested directory created or already exists at: {nested_path}")
    except Exception as e:
        err_msg = f"Failed to create nested directory for {path} due to: {e}"
        logger.error(err_msg)
        raise


def convert_filesize(size_in_bytes: int, unit: FileSizeUnits) -> float:
    """
    Convert file size from bytes to specified unit.
    
    Args:
        size_in_bytes: Size in bytes to convert.
        unit: Target unit for conversion.
        
    Returns:
        Converted size as float.
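
    Example:
        >>> convert_filesize(5 * 1024 * 1024, FileSizeUnits.MB)
        5.0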
    """
    conversion_factors = {
        FileSizeUnits.KB: 1024,
        FileSizeUnits.MB: 1024 * 1024,
        FileSizeUnits.GB: 1024 * 1024 * 1024,
    }
    
    factor = conversion_factors.get(unit, 1)
    return size_in_bytes / factor


def get_physical_filesize(filepath: str, unit: FileSizeUnits = FileSizeUnits.MB) -> str:
    """
    Get the physical size of a file on the local filesystem.
    
    Args:
        filepath: Path to the file, relative to the uploads directory.
        unit: Unit for size measurement.
        
    Returns:
        File size as a formatted string with unit suffix, e.g. "1.25mb".
        Returns a zero size in the requested unit if the file cannot be read.
    """
    unit_suffix = unit.value.lower()
    try:
        file_path = os.path.join(os.getcwd(), settings.UPLOADS_DIR, filepath)
        size = os.path.getsize(file_path)
        converted_size = convert_filesize(size, unit=unit)
        return f"{converted_size:.2f}{unit_suffix}"
    except Exception as e:
        logger.error(f"Error getting file size for {filepath}: {e}")
        return f"0.00{unit_suffix}"


def _format_file_size(size: int) -> str:
    """
    Format file size with appropriate unit.
    
    Args:
        size: Size in bytes.
        
    Returns:
        Formatted size string with appropriate unit.
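
    Example:
        >>> _format_file_size(512)
        '512 bytes'
        >>> _format_file_size(1536)
        '1.50 KB'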
    """
    if size < 1024:
        return f"{size} bytes"
    elif size < 1024 * 1024:
        return f"{size/1024:.2f} KB"
    elif size < 1024 * 1024 * 1024:
        return f"{size/(1024*1024):.2f} MB"
    else:
        return f"{size/(1024*1024*1024):.2f} GB"


def get_approximate_filesize(filepath: str, active_cdn=None) -> str:
    """
    Get approximate file size by downloading and measuring content.
    
    Args:
        filepath: Path to the file.
        active_cdn: Active CDN configuration.
        
    Returns:
        Formatted file size string.
    """
    try:
        if active_cdn and active_cdn.label.lower() == "s3":
            downloaded_file_bytes = download_file_from_s3(path=filepath)
        else:
            downloaded_file_bytes = download_file_from_system_path(path=filepath)
        
        if not downloaded_file_bytes:
            return "0 bytes"
            
        return _format_file_size(len(downloaded_file_bytes))
    except Exception as e:
        logger.error(f"Error getting approximate file size for {filepath}: {e}")
        return "0 bytes"


def _generate_unique_filename(original_filename: str) -> Tuple[str, str, str]:
    """
    Generate a unique filename with timestamp and secure ID.
    
    Args:
        original_filename: Original filename from upload.
        
    Returns:
        Tuple of (unique_filename, basename, file_format).
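
    Example (illustrative; the secure ID format depends on generate_secure_id
    and the timestamp varies per call):
        "my report.pdf" -> ("file..._1700000000.pdf", "my-report", "pdf")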
    """
    cleaned_name = original_filename.replace(" ", "-")
    name_parts = cleaned_name.rsplit(".", 1)
    
    if len(name_parts) == 2:
        basename, file_format = name_parts
    else:
        basename, file_format = name_parts[0], "bin"
    
    # utcnow() returns a naive datetime, and .timestamp() would misread it as
    # local time; use an aware UTC datetime instead
    suffix = f"{int(datetime.now(timezone.utc).timestamp())}"
    unique_id = generate_secure_id(prepend="file")
    unique_filename = f"{unique_id}_{suffix}.{file_format}"
    
    return unique_filename, basename, file_format


def _validate_file_size(contents: bytes) -> None:
    """
    Validate that file size doesn't exceed maximum allowed size.
    
    Args:
        contents: File content as bytes.
        
    Raises:
        APIException: If file size exceeds maximum allowed size.
    """
    max_size_bytes = MAX_UPLOAD_SIZE_MB * 1024 * 1024  # binary megabytes, matching convert_filesize
    if len(contents) > max_size_bytes:
        raise APIException(
            module=__name__,
            error={},
            status_code=400,
            message=f"File exceeds maximum size limit of {MAX_UPLOAD_SIZE_MB}MB",
        )


def _determine_upload_paths(active_cdn, path: str, filename: str) -> Tuple[str, str]:
    """
    Determine upload and serve paths based on CDN configuration.
    
    Args:
        active_cdn: Active CDN configuration.
        path: Upload path prefix.
        filename: Generated filename.
        
    Returns:
        Tuple of (serve_path, upload_path).
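
    Example (illustrative; assumes a CDN whose upload_path is "/var/cdn"):
        path="avatars/", filename="a.png" ->
        ("/avatars/a.png", "/var/cdn/avatars/a.png")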
    """
    base_path = (
        active_cdn.upload_path if active_cdn
        else os.path.join(os.getcwd(), settings.UPLOADS_DIR)
    )
    # `path` is expected to end with "/" when non-empty; the filename is
    # appended directly after it
    serve_path = f"/{path}{filename}"
    upload_path = base_path + serve_path
    
    return serve_path, upload_path


def upload_file_single(
    db: Session, 
    file: UploadFile = File(...), 
    contents: bytes = b"", 
    path: str = ""
) -> Dict:
    """
    Upload a single file to configured storage (local or S3).
    
    Args:
        db: Database session.
        file: FastAPI UploadFile object.
        contents: File contents as bytes.
        path: Upload path prefix.
        
    Returns:
        Dictionary containing the uploaded file's metadata.
        
    Raises:
        APIException: If file upload fails.
    """
    _validate_file_size(contents)
    
    file_name, basename, file_format = _generate_unique_filename(file.filename)
    content_type = file.content_type
    
    active_cdn = get_active_cdn(db=db)
    serve_path, upload_path = _determine_upload_paths(active_cdn, path, file_name)
    
    uploaded = _perform_upload(active_cdn, contents, content_type, upload_path)
    
    if not uploaded:
        raise APIException(
            module=__name__, 
            error={}, 
            status_code=400, 
            message="Could not upload file."
        )
    
    file_size_mb = convert_filesize(len(contents), unit=FileSizeUnits.MB)
    
    return {
        "file_name": file_name,
        "original_name": basename,
        "format": file_format,
        "path": serve_path,
        "content_type": content_type,
        "filesize": f"{file_size_mb:.2f}mb",
    }
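

# Illustrative caller sketch (the router object, endpoint path, and get_db
# dependency are assumptions, not part of this module):
#
#     @router.post("/files")
#     async def upload_endpoint(
#         file: UploadFile = File(...),
#         db: Session = Depends(get_db),
#     ):
#         contents = await file.read()
#         return upload_file_single(db=db, file=file, contents=contents, path="docs/")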


def _perform_upload(active_cdn, contents: bytes, content_type: str, upload_path: str) -> bool:
    """
    Perform the actual file upload based on CDN configuration.
    
    Args:
        active_cdn: Active CDN configuration.
        contents: File contents as bytes.
        content_type: MIME type of the file.
        upload_path: Full upload path.
        
    Returns:
        True if upload successful, False otherwise.
    """
    if active_cdn and active_cdn.label.lower() == "s3":
        return upload_file_to_s3(
            file_content=contents, 
            content_type=content_type, 
            path=upload_path
        )
    else:
        return upload_file_to_system_path(file_content=contents, path=upload_path)


def upload_file_multiple(
    db: Session, 
    files: List[UploadFile] = File(...), 
    path: str = ""
) -> List[Dict]:
    """
    Upload multiple files to configured storage.
    
    Args:
        db: Database session.
        files: List of FastAPI UploadFile objects.
        path: Upload path prefix.
        
    Returns:
        List of dictionaries containing file metadata.
        
    Raises:
        APIException: If any file upload fails or exceeds size limit.
    """
    uploaded_files = []
    
    for file in files:
        contents = file.file.read()
        # upload_file_single validates the size and raises APIException on failure
        uploaded_file = upload_file_single(db, file, contents, path)
        uploaded_files.append(uploaded_file)
    
    return uploaded_files


def remove_file_single(path: str) -> None:
    """
    Remove a single file from the local filesystem.
    
    Args:
        path: Full path to the file to remove.
    """
    try:
        os.unlink(path)
        logger.info(f"Successfully removed file: {path}")
    except FileNotFoundError:
        logger.warning(f"File not found for removal: {path}")
    except Exception as e:
        logger.error(f"Failed to remove file {path}: {e}")


def _create_s3_client():
    """
    Create and return configured S3 client.
    
    Returns:
        Boto3 S3 client instance.
    """
    return boto3.client(
        service_name="s3",
        aws_access_key_id=settings.AWS_S3_ACCESS_KEY,
        aws_secret_access_key=settings.AWS_S3_SECRET_KEY,
        region_name="us-east-1",
    )


def _sanitize_s3_path(path: str) -> str:
    """
    Remove leading slash from S3 path if present.
    
    Args:
        path: Original path.
        
    Returns:
        Sanitized path without leading slash.
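
    Example:
        >>> _sanitize_s3_path("/media/avatar.png")
        'media/avatar.png'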
    """
    return path[1:] if path.startswith("/") else path


def upload_file_to_s3(
    file_content: bytes,
    content_type: str,
    path: str = "",
    bucket: Optional[str] = None,
) -> bool:
    """
    Upload a file to AWS S3 bucket.
    
    Args:
        file_content: File content as bytes.
        content_type: MIME type of the file.
        path: S3 object key/path.
        bucket: S3 bucket name. If None, uses default from settings.
        
    Returns:
        True if upload successful, False otherwise.
    """
    if bucket is None:
        bucket = settings.AWS_S3_DEFAULT_BUCKET
        
    try:
        s3_client = _create_s3_client()
        sanitized_path = _sanitize_s3_path(path)
        
        s3_client.upload_fileobj(
            Fileobj=io.BytesIO(file_content),
            Bucket=bucket,
            Key=sanitized_path,
            ExtraArgs={"ContentType": content_type},
        )
        
        logger.info(f"Successfully uploaded file to S3: {sanitized_path}")
        return True
    except ClientError as e:
        logger.error(f"AWS S3 upload error: {e}")
        return False
    except Exception as e:
        logger.error(f"Unexpected error during S3 upload: {e}")
        return False


def upload_file_to_system_path(file_content: bytes, path: str) -> bool:
    """
    Upload a file to the local filesystem.
    
    Args:
        file_content: File content as bytes.
        path: Full filesystem path where file should be saved.
        
    Returns:
        True if upload successful, False otherwise.
    """
    try:
        logger.info(f"Attempting to upload to path: {path}")
        
        if path:
            create_nested_upload_directory(path)
        
        with open(path, "wb") as f:
            f.write(file_content)
        
        logger.info("File uploaded successfully to local filesystem")
        return True
    except Exception as e:
        logger.error(f"Local file upload error: {e}")
        return False


def download_file_from_s3(
    path: str, 
    bucket: Optional[str] = None
) -> Optional[bytes]:
    """
    Download a file from AWS S3 bucket.
    
    Args:
        path: S3 object key/path.
        bucket: S3 bucket name.
        
    Returns:
        File content as bytes, or None if download fails.
    """
    try:
        s3_client = _create_s3_client()
        sanitized_path = _sanitize_s3_path(path)
        
        if bucket is None:
            bucket = settings.AWS_S3_DEFAULT_BUCKET
        
        buffer = io.BytesIO()
        s3_client.download_fileobj(bucket, sanitized_path, buffer)
        
        logger.info(f"Successfully downloaded file from S3: {sanitized_path}")
        return buffer.getvalue()
    except Exception as e:
        logger.error(f"Could not fetch file from S3 {path}: {e}")
        return None


def download_file_from_system_path(path: str) -> Optional[bytes]:
    """
    Download a file from the local filesystem.
    
    Args:
        path: Full filesystem path to the file.
        
    Returns:
        File content as bytes, or None if download fails.
    """
    try:
        with open(path, "rb") as f:
            data = f.read()
        
        logger.info(f"Successfully downloaded file from filesystem: {path}")
        return data
    except FileNotFoundError:
        logger.error(f"File not found at {path}")
        return None
    except Exception as e:
        logger.error(f"Could not fetch file from system path {path}: {e}")
        return None


def _clean_image_path(image_path: str, active_cdn) -> str:
    """
    Clean image path by removing CDN-specific prefixes.
    
    Args:
        image_path: Original image path.
        active_cdn: Active CDN configuration.
        
    Returns:
        Cleaned image path.
    """
    cleaned_path = image_path
    
    for path_prefix in [active_cdn.host, active_cdn.root, active_cdn.path]:
        if path_prefix and path_prefix in cleaned_path:
            cleaned_path = cleaned_path.replace(path_prefix, "")
    
    return cleaned_path
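

# Illustrative behavior (the CDN attribute values below are assumptions):
#
#     from types import SimpleNamespace
#     cdn = SimpleNamespace(host="https://cdn.example.com", root="/var/cdn", path="/media")
#     _clean_image_path("https://cdn.example.com/media/a.png", cdn)  # -> "/a.png"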


def _download_image_content(active_cdn, image_path: str) -> Optional[bytes]:
    """
    Download image content based on CDN configuration.
    
    Args:
        active_cdn: Active CDN configuration.
        image_path: Path to the image.
        
    Returns:
        Image content as bytes, or None if download fails.
    """
    full_path = f"{active_cdn.upload_path}{image_path}"
    
    if active_cdn.label.lower() == "s3":
        return download_file_from_s3(path=full_path)
    else:
        return download_file_from_system_path(path=full_path)


def get_b64_image(image_url: str, db: Session) -> Optional[str]:
    """
    Convert an image URL to base64 encoded string.
    
    Args:
        image_url: URL or path to the image.
        db: Database session.
        
    Returns:
        Base64 encoded image string with a data URI prefix, the original URL
        when no CDN is configured, or None if the download fails.
    """
    active_cdn = get_active_cdn(db=db)
    if not active_cdn:
        return image_url
    
    image_path = _clean_image_path(image_url, active_cdn)
    downloaded_file_bytes = _download_image_content(active_cdn, image_path)
    
    if not downloaded_file_bytes:
        logger.warning(f"No content found at: {active_cdn.upload_path}{image_path}")
        return None
    
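    # NOTE: the data URI always declares image/png regardless of the actual
    # image format; callers serving non-PNG files should account for this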
    b64_encoded = base64.b64encode(downloaded_file_bytes).decode('utf-8')
    return f"data:image/png;base64,{b64_encoded}"


def get_image_resolution(image_content: bytes) -> Optional[Tuple[int, int]]:
    """
    Extract image resolution (width, height) from image content.
    
    Supports PNG and JPEG image formats by parsing their binary headers.
    
    Args:
        image_content: Raw bytes of the image file.
        
    Returns:
        Tuple of (width, height) in pixels, or None if parsing fails.
    """
    try:
        return _parse_image_resolution(image_content)
    except Exception as e:
        logger.error(f"Error while parsing image resolution: {e}")
        return None


def _parse_image_resolution(image_content: bytes) -> Optional[Tuple[int, int]]:
    """
    Parse image resolution from binary content for supported formats.
    
    Args:
        image_content: Raw bytes of the image file.
        
    Returns:
        Tuple of (width, height) in pixels, or None if format not supported.
    """
    if len(image_content) < 24:  # Minimum required bytes for header parsing
        return None
        
    # Check for PNG format (signature: 8 bytes)
    if image_content[:8] == b'\x89PNG\r\n\x1a\n':
        return _parse_png_resolution(image_content)
    
    # Check for JPEG format (signature: first 2 bytes)
    elif image_content[:2] == b'\xff\xd8':
        return _parse_jpeg_resolution(image_content)
    
    return None  # Unsupported format


def _parse_png_resolution(image_content: bytes) -> Optional[Tuple[int, int]]:
    """
    Parse PNG image resolution from IHDR chunk.
    
    Args:
        image_content: PNG image bytes.
        
    Returns:
        Tuple of (width, height) in pixels.
    """
    if len(image_content) < 24:
        return None
        
    # PNG has width and height at offset 16 (4 bytes each, big-endian)
    width, height = struct.unpack(">II", image_content[16:24])
    return width, height
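

# Illustrative check with a hand-crafted 24-byte PNG header (8-byte signature,
# IHDR chunk length/type, then big-endian width and height):
#
#     hdr = b"\x89PNG\r\n\x1a\n" + b"\x00\x00\x00\rIHDR" + struct.pack(">II", 800, 600)
#     assert _parse_png_resolution(hdr) == (800, 600)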


def _parse_jpeg_resolution(image_content: bytes) -> Optional[Tuple[int, int]]:
    """
    Parse JPEG image resolution from SOF (Start of Frame) marker.
    
    Args:
        image_content: JPEG image bytes.
        
    Returns:
        Tuple of (width, height) in pixels, or None if SOF not found.
    """
    index = 2  # Skip JPEG signature
    
    while index < len(image_content) - 9:  # Ensure enough bytes for parsing
        # JPEG markers are always prefixed with 0xFF
        if image_content[index] != 0xFF:
            break
            
        marker = image_content[index + 1]
        
        # Check if marker is a Start of Frame (SOF) marker; 0xC4 (DHT),
        # 0xC8 (JPG extension) and 0xCC (DAC) share the range but are not SOF
        if 0xC0 <= marker <= 0xCF and marker not in (0xC4, 0xC8, 0xCC):
            return _extract_jpeg_dimensions(image_content, index)
        
        # Skip to next marker
        if index + 3 >= len(image_content):
            break
            
        length = struct.unpack(">H", image_content[index + 2:index + 4])[0]
        index += 2 + length
    
    return None


def _extract_jpeg_dimensions(image_content: bytes, sof_index: int) -> Optional[Tuple[int, int]]:
    """
    Extract dimensions from JPEG SOF marker.
    
    Args:
        image_content: JPEG image bytes.
        sof_index: Index of the SOF marker.
        
    Returns:
        Tuple of (width, height) in pixels.
    """
    try:
        # Segment layout: [0xFF] [marker] [length: 2 bytes] [precision: 1 byte]
        # [height: 2 bytes] [width: 2 bytes], so height starts at sof_index + 5
        height, width = struct.unpack(
            ">HH", image_content[sof_index + 5:sof_index + 9]
        )
        return width, height
    except (struct.error, IndexError):
        return None
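

# Illustrative check with a minimal SOI + SOF0 segment (signature, marker,
# segment length, precision, height, width, and one component entry so the
# buffer satisfies the parser's length check):
#
#     sof = b"\xff\xd8\xff\xc0\x00\x0b\x08" + struct.pack(">HH", 600, 800) + b"\x01\x11\x00"
#     assert _parse_jpeg_resolution(sof) == (800, 600)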


def calculate_attachment_sizes(mail_attachments) -> Dict[str, float]:
    """
    Calculate and log sizes of email attachments.
    
    Processes a collection of mail attachments and calculates their individual
    and total sizes in bytes, KB, and MB. Logs detailed size information.
    
    Args:
        mail_attachments: Collection of attachment objects with 'byte_content' 
                         and 'original_name' attributes.
                         
    Returns:
        Dictionary containing total size statistics with keys:
        - 'total_bytes': Total size in bytes
        - 'total_kb': Total size in kilobytes
        - 'total_mb': Total size in megabytes
        - 'attachment_count': Number of non-empty attachments processed
    """
    total_size_bytes = 0
    attachment_count = 0
    
    for attachment in mail_attachments:
        if not hasattr(attachment, 'byte_content') or not hasattr(attachment, 'original_name'):
            logger.warning("Attachment missing required attributes (byte_content, original_name)")
            continue
            
        byte_content = attachment.byte_content
        if not byte_content:
            logger.info(f"Empty attachment: {getattr(attachment, 'original_name', 'Unknown')}")
            continue
        
        size_info = _calculate_individual_attachment_size(attachment, byte_content)
        total_size_bytes += size_info['bytes']
        attachment_count += 1
    
    # Calculate total sizes
    total_stats = {
        'total_bytes': total_size_bytes,
        'total_kb': total_size_bytes / 1024,
        'total_mb': total_size_bytes / (1024 * 1024),
        'attachment_count': attachment_count
    }
    
    _log_total_attachment_sizes(total_stats)
    return total_stats
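

# Illustrative usage with a stand-in attachment object (SimpleNamespace stands
# in for the real mail attachment type, which is not defined in this module):
#
#     from types import SimpleNamespace
#     att = SimpleNamespace(original_name="report.pdf", byte_content=b"x" * 2048)
#     stats = calculate_attachment_sizes([att])
#     assert stats["total_kb"] == 2.0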


def _calculate_individual_attachment_size(attachment, byte_content: bytes) -> Dict[str, float]:
    """
    Calculate size metrics for a single attachment.
    
    Args:
        attachment: Attachment object with original_name attribute.
        byte_content: Raw bytes of the attachment.
        
    Returns:
        Dictionary with size metrics in different units.
    """
    size_bytes = len(byte_content)
    size_kb = size_bytes / 1024
    size_mb = size_kb / 1024
    
    size_info = {
        'bytes': size_bytes,
        'kb': size_kb,
        'mb': size_mb
    }
    
    logger.info(
        f"Attachment: {attachment.original_name} - "
        f"Size: {size_bytes} bytes ({size_kb:.2f} KB, {size_mb:.2f} MB)"
    )
    
    return size_info


def _log_total_attachment_sizes(total_stats: Dict[str, float]) -> None:
    """
    Log total attachment size statistics.
    
    Args:
        total_stats: Dictionary containing total size statistics.
    """
    logger.info(
        f"Total Attachments ({total_stats['attachment_count']} files) - "
        f"Size: {total_stats['total_bytes']} bytes "
        f"({total_stats['total_kb']:.2f} KB, {total_stats['total_mb']:.2f} MB)"
    )