"Fast API module for general utilities"
import base64
import json
import logging
import os
from datetime import datetime
from io import BytesIO
from typing import IO, Optional

import aiohttp
import backoff
import boto3
import redis
import requests
from botocore.exceptions import ClientError
from fastapi import APIRouter, Body, HTTPException, Query, UploadFile
from fastapi.responses import (FileResponse, JSONResponse, RedirectResponse,
                               Response, StreamingResponse)

from services.company_profile.logo_standalone import get_brand_from_url
from services.stock_info.public_comparable import PublicComparables
from utils import validate_url
from utils.brand.brand_fetch import fetch_brand_details
from utils.google_fonts_service import get_google_fonts_list
from utils.s3_storage import S3BucketStorage

# Router for general-purpose utility endpoints. All routes below are
# mounted under the "/utilities" prefix and grouped under the
# "utilities" tag in the generated OpenAPI docs.
utilities = APIRouter(
    prefix="/utilities",
    tags=["utilities"],
    responses={404: {"description": "Not found"}},
    dependencies=[],  # add dependencies here
)


@utilities.get("/validate_url")
def validate(url: str):
    """Validate the input url.

    Returns a success message when the URL passes validation,
    otherwise responds with a 404 error.
    """
    if validate_url.validate_url(url):
        return {"message": "URL is valid"}
    raise HTTPException(status_code=404, detail="URL is not valid")


@utilities.get("/validate/ticker")
def validate_ticker(ticker: str):
    """Validate the input ticker.

    Args:
        ticker (str): The ticker symbol to validate.

    Returns:
        int: 200 when the ticker resolves to data (original payload kept
        for backward compatibility).

    Raises:
        HTTPException: 404 when the ticker cannot be resolved.
    """
    ticker_data = PublicComparables().get_ticker_data(ticker)
    if not ticker_data:
        # BUG FIX: the exception was previously *returned*, so FastAPI
        # serialized the HTTPException object inside a 200 response.
        # Raising it ensures the client actually receives a 404.
        raise HTTPException(status_code=404, detail="Ticker is not valid")

    return 200


@utilities.post("/upload_file/")
async def upload_file_to_s3_bucket(
    s3_file_path: str = Query(...),
    source_file_path: Optional[str] = Query(None),
    s3_file: Optional[dict] = Body(None),
):
    """
    Uploads a file to the specified location in the S3 bucket and returns a
    JSON response indicating the result.

    Args:
        s3_file_path (str): Destination path inside the S3 bucket.
        source_file_path (Optional[str]): Path of a local file to upload.
        s3_file (Optional[dict]): Optional request body; its "s3_file" key
            holds a base64-encoded binary payload to upload.

    Returns:
        JSONResponse: A success message when the upload completes.

    Raises:
        HTTPException: 400 for validation errors, 500 for S3 failures.
    """
    try:
        # Bucket name is configured through the environment.
        bucket_name = os.environ["xcap_s3_storage"]

        # BUG FIX: s3_file defaults to None; calling .get() on it crashed
        # with AttributeError whenever no request body was supplied.
        s3_file = (s3_file or {}).get("s3_file", "")

        s3_storage_obj = S3BucketStorage(bucket_name=bucket_name)

        result = s3_storage_obj.upload_file_to_s3_bucket(
            s3_file_path=s3_file_path,
            source_file_path=source_file_path,
            s3_file=s3_file,
        )

        if result:
            return JSONResponse(status_code=200, content={"message": result})

        # Previously a falsy result fell through and produced an implicit
        # 200 response with a null body; surface the failure explicitly.
        raise HTTPException(status_code=500, detail="Upload failed")

    except ClientError as e:
        if e.response["Error"]["Code"] == "NoSuchKey":
            raise HTTPException(
                status_code=500, detail=f"Exception occurred: {e}"
            ) from e
        raise HTTPException(status_code=500, detail=f"Error: {e}") from e

    except ValueError as ve:
        raise HTTPException(status_code=400, detail={"message": f"Error: {ve}"}) from ve

    except HTTPException:
        # BUG FIX: re-wrapping every HTTPException as a 400 discarded the
        # original status code and detail. Re-raise unchanged so the
        # intended status reaches the client.
        raise


@utilities.get("/get_file/")
async def get_file_from_s3_bucket(
    s3_file_path: Optional[str] = None,
):
    """
    Retrieves a file from the specified location in the S3 bucket and returns
    its content as base64 encoded JSON.

    Args:
        s3_file_path (Optional[str]): Path of the file inside the S3 bucket.

    Returns:
        JSONResponse: {"message": <base64-encoded file content>}.

    Raises:
        HTTPException: 400 for missing/invalid input or client errors,
            404 when the file has no content.
    """
    if not s3_file_path:
        # Previously a missing path flowed straight into the S3 call;
        # fail fast with a clear client error instead.
        raise HTTPException(status_code=400, detail="s3_file_path is required")

    try:
        bucket_name = os.environ["xcap_s3_storage"]
        s3_storage_obj = S3BucketStorage(bucket_name=bucket_name)

        file_content = s3_storage_obj.get_file_from_s3_bucket(s3_file_path=s3_file_path)

        if not file_content:
            # Previously empty content fell through and produced an
            # implicit 200 response with a null body.
            raise HTTPException(status_code=404, detail="File not found or empty")

        # BUG FIX: b64encode was called before the truthiness check, so a
        # None result from the helper raised TypeError; encode only after
        # confirming content exists.
        encoded = base64.b64encode(file_content).decode("utf-8")
        return JSONResponse(status_code=200, content={"message": encoded})

    except ClientError as e:
        if e.response["Error"]["Code"] == "NoSuchKey":
            raise HTTPException(
                status_code=400, detail=f"Exception occurred: {e}"
            ) from e
        raise HTTPException(status_code=400, detail=f"Error: {e}") from e

    except ValueError as ve:
        raise HTTPException(status_code=400, detail={"message": f"Error: {ve}"}) from ve


@utilities.get("/search_files/")
async def search_files_in_s3_bucket(
    keyword: str,
):
    """
    Search the S3 bucket for file names containing the given keyword.

    Args:
        keyword (str): Substring to match against file names.

    Returns:
        JSONResponse: {"matching_files": [...]} listing all matching paths.
    """
    try:
        # Bucket name comes from the environment; build the storage helper
        # and delegate the search to it.
        storage = S3BucketStorage(bucket_name=os.environ["xcap_s3_storage"])
        matches = storage.search_files_in_bucket(keyword=keyword)
        return JSONResponse(status_code=200, content={"matching_files": matches})

    except ClientError as e:
        # Both branches respond 400; only the message wording differs for
        # the missing-key case.
        error_code = e.response["Error"]["Code"]
        detail = (
            f"Exception occurred: {e}" if error_code == "NoSuchKey" else f"Error: {e}"
        )
        raise HTTPException(status_code=400, detail=detail) from e

    except ValueError as ve:
        raise HTTPException(status_code=400, detail={"message": f"Error: {ve}"}) from ve


@utilities.get("/scheduled_task_status/{task_id}")
async def research_slide_status(task_id: str):
    """Check the status of the research slide task.

    Args:
        task_id (str): Celery task identifier to inspect.

    Returns:
        dict: task_id and status; includes the result once the task is done.
    """
    # BUG FIX: the path previously lacked a leading slash, so the router
    # prefix concatenated into "/utilitiesscheduled_task_status/...",
    # making the endpoint unreachable at its intended URL.

    # Deferred import so the celery worker is only loaded when this
    # endpoint is hit.
    from tasks.celery_worker import celery

    task = celery.AsyncResult(task_id)
    payload = {"task_id": task_id, "status": task.status}
    if task.ready():
        payload["result"] = task.result
    return payload


@utilities.get("/fonts/google")
async def get_google_fonts():
    """
    Return the list of available Google Font families.

    The underlying service caches its response for 7 days to limit
    calls to the Google Fonts API.

    Returns:
        JSONResponse: A JSON response containing just the font family names.
    """
    return JSONResponse(status_code=200, content=await get_google_fonts_list())


@utilities.get("/fetch_brand")
async def fetch_brand(url: str):
    """
    Fetch brand details for a given URL via the Brandfetch API.

    Args:
        url (str): The URL of the company to fetch brand details for.

    Returns:
        JSONResponse: {"data": <brand details>} from the Brandfetch API.
    """
    brand_details = await fetch_brand_details(url)
    return JSONResponse(status_code=200, content={"data": brand_details})


@utilities.get("/fetch-and-preview")
async def download_pdf(url: str = None):
    """
    Download a PDF from the given URL and stream it back for inline preview.

    Args:
        url (str): Location of the PDF to fetch. Required.

    Returns:
        StreamingResponse: The PDF bytes with an inline content disposition,
        or a plain 500 Response when the download fails.
    """
    if not url:
        raise HTTPException(status_code=400, detail="URL is required")

    try:
        # Fetch the remote document asynchronously and buffer it in memory.
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                resp.raise_for_status()
                buffer = BytesIO(await resp.read())
    except aiohttp.ClientError as exc:
        return Response(content=str(exc), status_code=500)

    # Serve the buffered bytes with headers suitable for inline viewing.
    return StreamingResponse(
        buffer,
        media_type="application/pdf",
        headers={"Content-Disposition": "inline; filename=invoice.pdf"},
    )


@utilities.get("/sign_blob")
def generate_signed_url_blob(
    unsigned_url: str = Query(..., description="Unsigned URL or S3 object key")
):
    """
    Generates a signed URL for an S3 object from an unsigned URL or S3 file
    key, and returns a response based on the file type.

    For image files (e.g. .jpg, .png, etc.) the function will return a
    redirect to the signed URL so that it can be used directly in an
    <img src="..."> tag. For other file types, it returns the signed URL
    inside a JSON response.

    Query Parameters:
        unsigned_url (str): The unsigned URL or S3 object key.

    Returns:
        Either a RedirectResponse (for image files) or a JSONResponse with
        the signed URL.

    Raises:
        HTTPException: 400 for an invalid/empty key, 500 for unexpected
            errors while signing.
    """
    from urllib.parse import urlparse

    # Derive the S3 key: strip scheme/host for full URLs, otherwise treat
    # the input as a raw object key.
    if unsigned_url.startswith("http"):
        s3_key = urlparse(unsigned_url).path.lstrip("/")
    else:
        s3_key = unsigned_url

    # BUG FIX: this 400 was previously raised inside the try block and
    # swallowed by the broad `except Exception`, which re-raised it as a
    # 500. Validating before the try preserves the intended 400 status.
    if not s3_key:
        raise HTTPException(status_code=400, detail="Invalid unsigned URL provided.")

    try:
        # Retrieve S3 bucket name from environment variables and sign the key.
        xcap_s3_storage = os.environ["xcap_s3_storage"]
        s3_storage = S3BucketStorage(bucket_name=xcap_s3_storage)
        signed_url, mime_type = s3_storage.generate_signed_url(s3_key)
    except ValueError as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
    except Exception as ex:
        raise HTTPException(status_code=500, detail=f"Unexpected error: {ex}") from ex

    if mime_type and mime_type.startswith("image/"):
        # Images redirect so the URL works directly in an <img> tag.
        return RedirectResponse(url=signed_url)
    # Non-image files get the signed URL in a JSON payload.
    return JSONResponse(content={"signed_url": signed_url})
