import logging
import os

from utils.document_loader.DocUploader import DocUploader
from utils.s3_storage import S3BucketStorage, get_default_bucket_name
from services.ppt_generator.data_classes.project import Project
from utils.dynamo_db import DynamoDB
from concurrent.futures import ThreadPoolExecutor

from utils.client_check import ClientConfig


class ChatDocumentUploader:
    """Uploads a project's chat documents from S3 into a per-project ChromaDB collection."""

    def __init__(self, project: Project, client_config: ClientConfig):
        self.project = project
        self.client_config = client_config

    def doc_uploader_to_chromadb(
        self,
        bucket_name: str,
        doc_path: str,
    ) -> None:
        """Parse a single document and upload it to ChromaDB.

        Args:
            bucket_name: S3 bucket containing the document.
            doc_path: Key of the document within the bucket.

        Errors are logged and swallowed deliberately so that one bad
        document does not abort the batch driven by
        ``doc_upload_call_to_chromadb``.
        """
        s3_uri = f"s3://{bucket_name}/{doc_path}"
        try:
            doc_uploader = DocUploader(self.project, self.client_config, s3_uri)
            # Each project gets its own dedicated chat collection.
            doc_uploader.customer_chroma_collection = "chat_" + self.project.project_id
            doc_uploader.upload_to_chromadb()
            # Fixed: previous message claimed "DynamoDB" although the upload
            # target is ChromaDB; also use logging instead of print.
            logging.info("Uploaded %s to ChromaDB", s3_uri)
        except Exception:
            logging.error("Error processing document: %s", s3_uri, exc_info=True)

    def doc_upload_call_to_chromadb(
        self,
        nested_path: str = "",
        team_id: str = "",
    ) -> dict:
        """Upload every document under the project's S3 folder to ChromaDB.

        Files are processed concurrently; per-file failures are logged by
        ``doc_uploader_to_chromadb`` and do not stop the batch.

        Args:
            nested_path: Optional sub-folder inside the project folder.
            team_id: Team identifier used as the top-level S3 prefix.

        Returns:
            A status-message dict.
        """
        folder_name = f"{team_id}/{self.project.project_id}/"
        if nested_path:
            folder_name = f"{folder_name}{nested_path}/"

        bucket_name = get_default_bucket_name()
        s3_storage = S3BucketStorage(bucket_name=bucket_name)
        files = s3_storage.list_files_in_folder(bucket_name, folder_name)

        # Use half the available cores, but at least one worker.
        # Fixed: min(1, ...) previously capped the pool at a single worker
        # (serializing all uploads), and os.cpu_count() can return None.
        max_workers = max(1, (os.cpu_count() or 1) // 2)
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = [
                executor.submit(
                    self.doc_uploader_to_chromadb,
                    bucket_name,
                    doc_path,
                )
                for doc_path in files
            ]
            for future in futures:
                future.result()  # Wait for completion; surfaces unexpected errors

        return {"message": "Files uploaded successfully."}
