"""Module for handling document uploads to S3"""

import asyncio
import logging
import mimetypes
import os
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import List, Optional, Union

import aiofiles
from fastapi import UploadFile

from models.team import Team
from services.ppt_generator.data_classes.project import Project
from utils.client_check import ClientConfig
from utils.document_loader.DocUploader import DocUploader
from utils.s3_storage import S3BucketStorage


class DocUploaderV2:
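    """Uploads a project's FastAPI ``UploadFile`` objects to S3 via ``DocumentUploaderS3``."""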

    def __init__(self, project: Project, team: Team, files: List[UploadFile]):
        self.uploader = DocumentUploaderS3(project=project, team=team)
        self.files = files
        self.project_files = self.files
        self.project = project
        self.uploaded_files = []
        self.client = team

    async def store_on_s3(self, files: List[UploadFile]):
        """Store files on S3 and collect metadata for each successful upload."""
        upload_results = []
        for file in files:
            # Upload each file's content to S3
            upload_result = await self.uploader.upload_files(
                upload_file=file,
            )

            logging.info("Uploaded %s to %s", file.filename, upload_result)
            if upload_result:
                upload_results.append(
                    {
                        "file_name": file.filename,
                        "file_url": upload_result,
                        "file_type": file.content_type,
                        "file_size": file.size,
                    }
                )

        self.uploaded_files = upload_results

        return upload_results

    def get_uploaded_files(self):
        """Return the metadata collected by the last ``store_on_s3`` call."""
        return self.uploaded_files

    # def store_on_chromadb(self, files: list):
    #     "Store files on ChromaDB"
    #     print(files)
    #     print("Inside store on chromadb")
    #     for file in files:
    #         doc_uploader = DocUploader(
    #             file,
    #             self.project,
    #             self.client,
    #         )
    #         if doc_uploader.supported:
    #             doc_uploader.upload_to_chromadb()


class DocumentUploaderS3:
    """Class to handle document uploads to S3 bucket"""

    def __init__(self, project: Project, team: Team):
        """
        Initialize DocumentUploader with project and client configuration

        Args:
            project (Project): Project instance containing project details
            client (ClientConfig, optional): Client configuration. Defaults to XCM client.
        """
        self.team = team
        self.project = project
        xcap_s3_storage = os.environ["xcap_s3_storage"]
        self.s3_storage = S3BucketStorage(bucket_name=xcap_s3_storage)

    ALLOWED_EXTENSIONS = {
        # Documents
        ".pdf",
        ".doc",
        ".docx",
        ".txt",
        ".rtf",
        ".odt",
        # Spreadsheets
        ".xls",
        ".xlsx",
        ".csv",
        # Presentations
        ".ppt",
        ".pptx",
        # Images
        ".jpg",
        ".jpeg",
        ".png",
        ".gif",
        ".svg",
        # Other
        ".zip",
        ".rar",
    }

    MAX_FILE_SIZE = 100 * 1024 * 1024  # 100MB

    def validate_file(self, filename: str, filesize: int) -> tuple[bool, str]:
        """
        Validate file extension and size

        Args:
            filename (str): Name of the file
            filesize (int): Size of file in bytes

        Returns:
            tuple[bool, str]: (is_valid, error_message)
        """
        ext = Path(filename).suffix.lower()

        if ext not in self.ALLOWED_EXTENSIONS:
            return False, f"File extension {ext} not allowed"

        if filesize > self.MAX_FILE_SIZE:
            return (
                False,
                f"File size exceeds maximum allowed size of {self.MAX_FILE_SIZE/1024/1024}MB",
            )

        return True, ""

    async def upload_files(
        self, upload_file: UploadFile, temp_dir: str = "temp"
    ) -> Optional[str]:
        """
        Handle FastAPI UploadFile upload

        Args:
            upload_file (UploadFile): FastAPI UploadFile object
            temp_dir (str): Temporary directory for file processing

        Returns:
            Optional[str]: S3 URL if successful
        """
        temp_path = None
        try:
            # Validate file
            is_valid, error = self.validate_file(
                upload_file.filename, await self._get_file_size(upload_file)
            )
            if not is_valid:
                raise ValueError(error)

            # Ensure temp directory exists
            os.makedirs(temp_dir, exist_ok=True)

            # Save the file temporarily, keeping only the base name of the upload
            temp_path = Path(temp_dir) / Path(upload_file.filename).name
            async with aiofiles.open(temp_path, "wb") as f:
                while content := await upload_file.read(1024 * 1024):  # Read 1MB chunks
                    await f.write(content)

            # Upload to S3
            return await self.upload_file_async(str(temp_path))

        finally:
            # Remove the temporary copy if it was created
            if temp_path is not None and temp_path.exists():
                os.unlink(temp_path)

    async def upload_file_async(self, file_path: str) -> str:
        """
        Asynchronously upload a file to S3 by running the blocking upload in a worker thread
        """
        loop = asyncio.get_running_loop()
        with ThreadPoolExecutor() as pool:
            return await loop.run_in_executor(pool, self._upload_single_file, file_path)

    def _upload_single_file(self, file_path: str) -> str:
        """
        Upload a single file to S3 under a team/project-scoped key
        """
        file_name = Path(file_path).name
        content_type = mimetypes.guess_type(file_path)[0] or "application/octet-stream"
        # Convert project_id to string to ensure proper path formatting
        project_id_str = str(self.project.project_id)
        s3_file_path = f"{self.team.id}/{project_id_str}/{file_name}"

        # Upload to S3 (the ContentType extra arg is currently disabled below)
        self.s3_storage.upload_file_to_s3_bucket(
            s3_file_path=s3_file_path,
            source_file_path=file_path,
            # extra_args={'ContentType': content_type}
        )

        return self.s3_storage.s3_file_location

    async def _get_file_size(self, upload_file: UploadFile) -> int:
        """Get total file size from UploadFile"""
        size = 0
        while chunk := await upload_file.read(8192):
            size += len(chunk)
        await upload_file.seek(0)
        return size

    async def bulk_upload(
        self, files: List[Union[str, UploadFile]], temp_dir: str = "temp"
    ) -> List[str]:
        """
        Bulk upload multiple files

        Args:
            files: List of file paths or UploadFile objects
            temp_dir: Temporary directory for processing

        Returns:
            List[str]: List of S3 URLs
        """
        uploaded_urls = []

        for file in files:
            try:
                if isinstance(file, UploadFile):
                    url = await self.upload_files(file, temp_dir)
                else:
                    url = await self.upload_file_async(file)
                uploaded_urls.append(url)
            except Exception as e:
                logging.error(f"Failed to upload {file}: {str(e)}")
                continue

        return uploaded_urls


if __name__ == "__main__":
    # Example usage
    project = Project.check_project_in_db(project_id="your_project_id")
    team = Team.check_team_in_db(team_id="d4d6f97c-3f5c-4f19-983d-62f754f6c9f4")

    uploader = DocumentUploaderS3(project=project, team=team)

    # Example: Upload local files
    files_to_upload = ["path/to/your/file1.pdf", "path/to/your/file2.docx"]
    uploaded_urls = uploader.upload_files(files_to_upload)

    # Example: Upload file content directly
    content = "This is some test content"
    uploaded_url = uploader.upload_file_content(content, "test.txt")
