"""Module for interacting with S3 bucket files"""

import logging
import mimetypes
import os
from datetime import datetime
from typing import Optional
from urllib.parse import urlparse

import backoff
import boto3
from botocore.exceptions import ClientError

from utils.backoff_handlers import backoff_hdlr, failure_hdlr, success_hdlr


class S3BucketStorage:
    """
    Class to handle operations related to an S3 bucket, such as uploading and getting files.
    """

    def __init__(self, bucket_name: str = "xcap-bucket") -> None:
        """
        Initializes the S3BucketStorage class with the specified bucket name.

        Args:
            bucket_name (str): The name of the S3 bucket.
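
        Example (illustrative; the bucket name is a placeholder and must
        already exist in your AWS account):
            storage = S3BucketStorage(bucket_name="my-bucket")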
        """
        self.s3_client = boto3.client("s3")
        self.bucket_name = bucket_name

    @backoff.on_exception(
        backoff.expo,
        ClientError,
        max_tries=5,
        on_backoff=backoff_hdlr,
        on_success=success_hdlr,
        on_giveup=failure_hdlr,
    )
    def upload_file_to_s3_bucket(
        self,
        s3_file_path: str,
        source_file_path: Optional[str] = None,
        s3_file: Optional[bytes] = None,
    ) -> str:
        """
        Uploads a file to the specified location in the S3 bucket with retry logic.

        Args:
            s3_file_path (str): The path to upload the file to in the S3 bucket.
            source_file_path (str): The path of the file to upload from the local system.
            max_retries (int): Maximum number of retries if upload fails (default is 5).
            retry_delay (int): Delay between retries in seconds (default is 2).

        Returns:
            str: A message indicating the success or failure of the upload operation.
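
        Example (illustrative; assumes valid AWS credentials and an existing
        bucket; keys and paths are placeholders):
            storage = S3BucketStorage("my-bucket")
            # Upload from a local file
            storage.upload_file_to_s3_bucket(
                s3_file_path="docs/report.pdf",
                source_file_path="/tmp/report.pdf",
            )
            # Upload in-memory bytes
            storage.upload_file_to_s3_bucket(
                s3_file_path="docs/note.txt", s3_file=b"hello"
            )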
        """
        # os.path.basename already handles keys with and without "/" separators
        file_name = os.path.basename(s3_file_path)

        try:
            if s3_file is not None:
                # Upload in-memory contents directly
                self.s3_client.put_object(
                    Body=s3_file, Bucket=self.bucket_name, Key=s3_file_path
                )
            elif source_file_path is not None:
                # Validate the local file before attempting to open it
                if not os.path.exists(source_file_path):
                    raise ValueError(f"File {source_file_path} does not exist.")
                # Open the source file in binary read mode and stream it to S3
                with open(source_file_path, "rb") as source_file:
                    self.s3_client.upload_fileobj(
                        source_file, self.bucket_name, s3_file_path
                    )
            else:
                raise ValueError(
                    "Provide either source_file_path or s3_file (file contents)."
                )

            self.s3_file_location = self.create_object_bucket_url(
                self.bucket_name, s3_file_path
            )
            return f"File {file_name} uploaded to {s3_file_path} in {self.bucket_name} bucket."

        except ValueError as ve:
            # Log and re-raise with a detailed error message
            logging.error(
                "%s: Failed uploading file %s to S3 bucket %s.\nError: %s",
                datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
                source_file_path,
                self.bucket_name,
                ve,
            )
            raise ValueError(
                f"Failed uploading {s3_file_path} to {self.bucket_name} bucket: {ve}"
            ) from ve

    def create_object_bucket_url(self, bucket_name: str, object_key: str) -> str:
        """
        Build the virtual-hosted-style object URL for S3.

        Note: this assumes the global endpoint; buckets outside us-east-1 may
        need a region-specific hostname (bucket.s3.<region>.amazonaws.com).
        """
        return f"https://{bucket_name}.s3.amazonaws.com/{object_key}"

    @backoff.on_exception(
        backoff.expo,
        ClientError,
        max_tries=5,
        on_backoff=backoff_hdlr,
        on_success=success_hdlr,
        on_giveup=failure_hdlr,
    )
    def get_file_from_s3_bucket(self, s3_file_path: str) -> bytes:
        """
        Retrieves a file from the specified location in the S3 bucket with retry logic.

        Args:
            s3_file_path (str): The path of the file to retrieve from the S3 bucket.
            max_retries (int): Maximum number of retries if retrieval fails (default is 5).
            retry_delay (int): Delay between retries in seconds (default is 2).

        Returns:
            bytes: The content of the retrieved file as binary data.
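
        Example (illustrative; assumes the key or URI exists):
            storage = S3BucketStorage("my-bucket")
            data = storage.get_file_from_s3_bucket("docs/report.pdf")
            # Or with a full S3 URI (uses that bucket for this call only)
            data = storage.get_file_from_s3_bucket("s3://other-bucket/docs/report.pdf")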
        """
        try:
            bucket_name = self.bucket_name
            # If s3_file_path is a full S3 URI (s3://bucket/key), parse the
            # bucket and key from it instead of mutating self.bucket_name
            if s3_file_path.startswith("s3://"):
                parsed_uri = urlparse(s3_file_path)
                bucket_name = parsed_uri.netloc
                s3_file_path = parsed_uri.path.lstrip("/")

            response = self.s3_client.get_object(
                Bucket=bucket_name, Key=s3_file_path
            )

            # Check if the response status code is 200 (OK)
            if response["ResponseMetadata"]["HTTPStatusCode"] == 200:
                # Read the file content as binary data
                file_content = response["Body"].read()

                # Check if the file content is not empty
                if not file_content:
                    raise ValueError(
                        f"File content is empty at {s3_file_path} in {self.bucket_name}"
                    )

                return file_content

            else:
                raise ValueError(
                    f"Error getting object from {self.bucket_name}. File not found at {s3_file_path}"
                )

        except ValueError as ve:
            # Log and re-raise with a detailed error message
            logging.error(
                "%s: Failed getting file %s from S3 bucket %s.\nError: %s",
                datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
                s3_file_path,
                bucket_name,
                ve,
            )
            raise ValueError(f"Failed getting {s3_file_path}: {ve}") from ve

    @backoff.on_exception(
        backoff.expo,
        ClientError,
        max_tries=5,
        on_backoff=backoff_hdlr,
        on_success=success_hdlr,
        on_giveup=failure_hdlr,
    )
    def search_files_in_bucket(self, keyword: str) -> list:
        """
        Search for files containing the specified keyword in the S3 bucket with retry logic.

        Args:
            keyword (str): The keyword to search for in file names.
            max_retries (int): Maximum number of retries if retrieval fails (default is 5).
            retry_delay (int): Delay between retries in seconds (default is 2).

        Returns:
            list: A list of file paths in the bucket that contain the keyword.
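
        Example (illustrative; keys are placeholders):
            storage = S3BucketStorage("my-bucket")
            # Returns e.g. ["docs/report-2024.pdf"] if the base name matches
            matches = storage.search_files_in_bucket("report")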
        """
        try:
            matching_files = []
            # Paginate so buckets with more than 1,000 objects are fully searched
            paginator = self.s3_client.get_paginator("list_objects_v2")
            for page in paginator.paginate(Bucket=self.bucket_name):
                for obj in page.get("Contents", []):
                    file_key = obj["Key"]
                    if "." in file_key:
                        # Match against the file name without path or extension
                        file_name = file_key.split("/")[-1].split(".")[0]
                        if keyword in file_name:
                            matching_files.append(file_key)

            if not matching_files:
                raise ValueError(
                    f"No files in S3 bucket {self.bucket_name} have keyword {keyword}"
                )
            return matching_files

        except ValueError as ve:
            logging.error(
                "Error %s: Error searching files in S3 bucket %s with keyword '%s'. \nError:%s",
                datetime.now().strftime("%Y/%m/%d %H:%M:%S"),
                self.bucket_name,
                keyword,
                ve,
            )
            raise ValueError(
                f"Error searching files in S3 bucket {self.bucket_name} with keyword '{keyword}': {ve}"
            ) from ve

    @backoff.on_exception(
        backoff.expo,
        ClientError,
        max_tries=5,
        on_backoff=backoff_hdlr,
        on_success=success_hdlr,
        on_giveup=failure_hdlr,
    )
    def bulk_file_upload(
        self, source_folder: str, s3_file_path: Optional[str] = None
    ) -> str:
        """
        Bulk File Upload method to perform upload and download operations with the S3 bucket.

        Args:
            source_folder (str): The path of the local folder containing files to upload.
            s3_file_path (str): The path of folder to upload files to in the S3 bucket.

        Returns:
            bytes: The content of the retrieved file from the S3 bucket.
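
        Example (illustrative; folder and prefix are placeholders):
            storage = S3BucketStorage("my-bucket")
            # Uploads every file in ./exports under the "backups/" prefix
            storage.bulk_file_upload(source_folder="exports/", s3_file_path="backups/")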
        """
        try:
            if not s3_file_path:
                s3_file_path = source_folder
            # Ensure the prefix ends with "/" so keys don't run together
            if not s3_file_path.endswith("/"):
                s3_file_path += "/"
            # Loop through files in the source folder
            for file in os.listdir(source_folder):
                source_file_path = os.path.join(source_folder, file)
                if os.path.isfile(source_file_path):
                    # Build the key per file; don't accumulate onto the prefix
                    s3_key = s3_file_path + file

                    # Upload the file to the S3 bucket
                    self.upload_file_to_s3_bucket(
                        s3_file_path=s3_key, source_file_path=source_file_path
                    )

            return f"Files uploaded to {s3_file_path} successfully."

        except ValueError as ve:
            logging.error("Error during bulk upload from %s: %s", source_folder, ve)
            raise ValueError(f"Error during bulk upload: {ve}") from ve

    def generate_signed_url(
        self, s3_key: str, expires_in: int = 3600
    ) -> tuple[str, Optional[str]]:
        """
        Generates a signed URL for an S3 object.

        Args:
            s3_key (str): The S3 object key
            expires_in (int): URL expiration time in seconds (default: 1 hour)

        Returns:
            tuple[str, str]: A tuple containing (signed_url, mime_type)
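
        Example (illustrative; assumes the key exists):
            storage = S3BucketStorage("my-bucket")
            url, mime_type = storage.generate_signed_url("docs/report.pdf", expires_in=600)
            # url is valid for 10 minutes; mime_type is "application/pdf"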
        """
        try:
            # Generate presigned URL
            signed_url = self.s3_client.generate_presigned_url(
                ClientMethod="get_object",
                Params={"Bucket": self.bucket_name, "Key": s3_key},
                ExpiresIn=expires_in,
            )

            # Guess the MIME type from the key's extension (may be None)
            mime_type, _ = mimetypes.guess_type(s3_key)

            return signed_url, mime_type

        except ClientError as e:
            raise ValueError(f"Error generating signed URL: {e}") from e


if __name__ == "__main__":
    BUCKET_NAME = "xcap-storage-dev"
    SOURCE_FOLDER = "images/"
    S3_FILE_PATH = "generic/images/unity/"

    s3_storage_obj = S3BucketStorage(bucket_name=BUCKET_NAME)
    response = s3_storage_obj.bulk_file_upload(SOURCE_FOLDER, S3_FILE_PATH)
    print(response)
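
    # Illustrative follow-up calls (assume valid AWS credentials and that the
    # uploaded keys exist; "example.png" is a placeholder, so uncomment and
    # adjust names for your environment):
    # data = s3_storage_obj.get_file_from_s3_bucket(S3_FILE_PATH + "example.png")
    # matches = s3_storage_obj.search_files_in_bucket("example")
    # url, mime_type = s3_storage_obj.generate_signed_url(
    #     S3_FILE_PATH + "example.png", expires_in=600
    # )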
