"Service to generate PPT structure."
import json
import sys

from dotenv import load_dotenv
from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain.prompts import ChatPromptTemplate
from langchain.tools import tool
from langchain_core.output_parsers import JsonOutputParser
from langchain_openai import ChatOpenAI
from pydantic import BaseModel, Field

sys.path.append(r".")
# from utils.logger import ServiceLogger
import logging

from configs.config import OPENAI_API_KEY, OPENAI_MODEL_4O, OPENAI_MODEL_MINI
from services.ppt_generator.data_classes.presentation_outline import SlideLLM
from services.ppt_generator.data_classes.project import Project, Section, Slide

load_dotenv()
XCM_logger = logging.getLogger()


class ReviewerResponse(BaseModel):
    """Schema for one slide-level review suggestion.

    The reviewer LLM may emit a single object of this shape or a list of
    them (see the original note: "this can be a list or a single item").
    Also used as the ``args_schema`` of the ``Adjust_Slide`` tool.
    """

    # Title of the section the suggestion applies to.
    section_title: str
    # Title of the slide to adjust; per the reviewer prompt, left empty
    # when a brand-new slide should be added to the section.
    slide_title: str
    # Free-text description of the suggested change.
    slide_suggestion: str


class PrivateEquityReviewer(BaseModel):
    """Argument schema describing the reviewer persona and pitch context.

    NOTE(review): not referenced anywhere in this file — presumably meant
    to be the ``args_schema`` of the ``Presentation_Reviewer`` tool, whose
    parameters match these fields one-to-one; confirm before removing.
    """

    # Reviewer persona, e.g. "CEO" or a "... Partner" title.
    persona: str = Field(..., title="This is a CEO or Private Equity Partner")
    industry: str
    sector: str
    # Kind of presentation being reviewed (e.g. pitch deck type).
    pitch_type: str
    # Markdown outline of the deck's sections and slides.
    presentation_structure: str


@tool("Presentation_Reviewer")
def presentation_reviewer(
    persona: str,
    industry: str,
    sector: str,
    pitch_type: str,
    presentation_structure: str,
) -> str:
    """Review the structure of the PPT.

    Builds a persona-specific system prompt, asks the reviewer LLM for
    slide-level feedback on the supplied outline, and returns the parsed
    feedback re-serialized as a JSON string.

    Args:
        persona: Reviewer persona ("CEO", a "... Partner" title, or other).
        industry: Industry the presentation targets.
        sector: Sector within that industry.
        pitch_type: Kind of presentation being reviewed.
        presentation_structure: Markdown outline of sections and slides.

    Returns:
        JSON-encoded reviewer feedback (shaped per ``ReviewerResponse``).
    """
    llm = ChatOpenAI(
        openai_api_key=OPENAI_API_KEY,
        temperature=1,
        model_name=OPENAI_MODEL_MINI,
    )

    # Exactly one persona message. Previously the first branch was a
    # standalone `if`, so a "CEO" persona also fell into the `else` and
    # received a second, conflicting system message.
    chat_prompt = []
    if persona == "CEO":
        chat_prompt.append(("system", "You are a CEO with 20 years of experience."))
    elif "Partner" in persona:
        chat_prompt.append(
            ("system", f"You are a {persona} with 20 years of experience.")
        )
    else:
        chat_prompt.append(
            (
                "system",
                f"You are an expert in the {industry} industry and the {sector} sector.",
            )
        )

    chat_prompt.extend(
        [
            (
                "system",
                f"You specialize in the {industry} industry and the {sector} sector.",
            ),
            (
                "system",
                f"You are going to help review the structure of a {pitch_type} presentation.",
            ),
            (
                "human",
                "The presentation has the following sections and slides: {input_sections}",
            ),
            (
                "human",
                # Fixed prompt typo: "tile the slide title empty" -> "leave ...".
                """Give me feedback on how to improve the structure of the presentation at the slide level. \
                If you need to add a new slide in a section, then leave the slide title empty.""",
            ),
            ("system", "You will respond in a json object as {json_output}"),
        ]
    )

    prompt = ChatPromptTemplate.from_messages(chat_prompt)
    parser = JsonOutputParser(pydantic_object=ReviewerResponse)
    chain = prompt | llm | parser
    response = chain.invoke(
        {
            "input_sections": presentation_structure,
            "json_output": parser.get_format_instructions(),
        }
    )

    return json.dumps(response)


@tool("Adjust_Slide", args_schema=ReviewerResponse)
def adjust_slide(
    section_title: str, slide_title: str, slide_suggestion: str
) -> str:
    """Adjust a slide in the global project based on a reviewer suggestion.

    Looks up the slide in the module-level ``project`` (set by ``main``),
    asks the LLM for a revised slide that incorporates ``slide_suggestion``,
    and mutates that slide in place.

    Args:
        section_title: Title of the section containing the slide.
        slide_title: Title of the slide to adjust.
        slide_suggestion: Reviewer's free-text suggestion.

    Returns:
        A short status string for the calling agent ("Slide updated." or an
        error description). The previous ``-> Section`` annotation was wrong:
        every return path yields ``str``.
    """
    # Tool functions can't receive the project as an argument, so it is
    # shared through module globals (populated by main()).
    project = globals()["project"]

    # Locate the section; return an error instead of crashing with
    # AttributeError when the reviewer invented an unknown title.
    section: Section = next(
        (s for s in project.sections if s.title == section_title), None
    )
    if section is None:
        return f"Error: section '{section_title}' not found."

    # Locate the slide within that section, with the same guard.
    slide: Slide = next(
        (s for s in section.slides if s.title == slide_title), None
    )
    if slide is None:
        return f"Error: slide '{slide_title}' not found."

    llm = ChatOpenAI(
        openai_api_key=OPENAI_API_KEY,
        temperature=0.5,
        model_name=OPENAI_MODEL_MINI,
    )

    chat_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                f"You are an investment banking expert in the {project.industry} industry and the {project.sector} sector.",
            ),
            (
                "system",
                """You are going to be provided with a suggestion on how to adjust a slide in a presentation and you need to \
            provide a new slide, with the suggestion included.""",
            ),
            ("human", "The Current slide is: {slide_info}"),
            ("human", f"The Suggestion is: {slide_suggestion}"),
            ("system", "You will respond in a json object as {json_output}"),
        ]
    )

    parser = JsonOutputParser(pydantic_object=SlideLLM)

    chain = chat_prompt | llm | parser
    response = chain.invoke(
        {
            "slide_info": json.dumps(
                {
                    "title": slide.title,
                    "content": slide.content,
                    "data": slide.data,
                    "design": slide.design,
                    "slide_order": slide.slide_order,
                }
            ),
            "json_output": parser.get_format_instructions(),
        }
    )

    # The parser may hand back a plain dict (coerce to the model) or, on
    # failure, a string.
    if isinstance(response, dict):
        response = SlideLLM(**response)
    if isinstance(response, str):
        return "Error: Slide couldn't be updated."

    # Mutate the already-located slide in place. The previous implementation
    # re-scanned project.sections only to find this same object again (and
    # never broke out of the outer loop).
    slide.title = response.title
    slide.content = response.content
    slide.data = response.data
    slide.design = response.design
    slide.slide_order = response.slide_order

    return "Slide updated."


def main(project_id: str, input_project: Project = None) -> Project:
    """Run the review agent over a project and persist the result.

    Loads the project (from the DB unless one is supplied), renders its
    sections and slides as a markdown outline, lets a tool-calling agent
    request reviews and slide adjustments, then writes the project back.

    Args:
        project_id: DB identifier, used only when ``input_project`` is falsy.
        input_project: Optional pre-loaded project; skips the DB lookup.

    Returns:
        The (possibly mutated) project after the agent run.
    """
    # The Adjust_Slide tool reads and mutates this module-level variable;
    # the agent itself only ever sees the markdown outline built below.
    global project

    if input_project:
        project = input_project
    else:
        project = Project.check_project_in_db(project_id=project_id)

    model = ChatOpenAI(
        openai_api_key=OPENAI_API_KEY,
        temperature=0.1,
        model_name=OPENAI_MODEL_MINI,
    )

    agent_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                f"You are an investment banking expert in the {project.industry} industry and the {project.sector} sector.",
            ),
            (
                "human",
                "You have access to a set of tools to get the presentation structure that you created reviewed by a partner.",
            ),
            (
                "human",
                "You need to pass in the full presentation structure and get feedback from your partner.",
            ),
            (
                "human",
                "The presentation has the following sections and slides: {input_sections}",
            ),
            (
                "human",
                "Get the feedback from your partner and adjust as needed. Once done, return 'done with feedback'",
            ),
            (
                "human",
                "At the end, you want to ask for the updated project and then check it for feedback. ",
            ),
            ("placeholder", "{agent_scratchpad}"),
        ]
    )

    # Render the project as a markdown outline; joined once rather than
    # built by repeated string concatenation.
    parts = []
    for s in project.sections:
        parts.append(f"# Section: {s.title}\n")
        parts.append(f"Section content: {s.content}\n\n")
        for slide in s.slides:
            parts.append(f"## Slide: {slide.title}\n")
            parts.append(f"Slide content: {slide.content}\n\n")
            parts.append(f"Slide design: {slide.design}\n\n")
    project_structure = "".join(parts)

    tools = [presentation_reviewer, adjust_slide]

    agent = create_tool_calling_agent(
        llm=model,
        prompt=agent_prompt,
        tools=tools,
    )

    # Cap iterations so a looping agent can't run away.
    agent_executor = AgentExecutor(
        agent=agent, tools=tools, verbose=True, max_iterations=15
    )

    agent_executor.invoke(
        {
            "input_sections": project_structure,
        }
    )

    # Persist whatever the Adjust_Slide tool changed.
    project.update_project_in_db()

    return project


if __name__ == "__main__":

    # project = Project.check_project_in_db(project_id="altlaw")

    import pickle

    # with open('/tmp/project.pkl', 'wb') as f:
    #     pickle.dump(project, f)

    # NOTE(review): debug scaffolding — this loads a pickled project and
    # immediately writes it back out WITHOUT ever calling main(), so no
    # review actually runs. Presumably main(project_id=..., input_project=
    # project) was meant between the load and the dump — confirm.
    # SECURITY: pickle.load executes arbitrary code; only use on files you
    # created yourself.
    with open("/tmp/project.pkl", "rb") as f:
        project = pickle.load(f)

    ## dump the project

    with open("/tmp/project_v2.pkl", "wb") as f:
        pickle.dump(project, f)
