
from abc import ABC, abstractmethod
from typing import Callable

from langchain_core.language_models import BaseLanguageModel
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.language_models.llms import BaseLLM
from langchain_core.prompts import BasePromptTemplate
from pydantic import BaseModel, Field


class BasePromptSelector(BaseModel, ABC):
    """Base class for prompt selectors."""

    @abstractmethod
    def get_prompt(self, llm: BaseLanguageModel) -> BasePromptTemplate:
        """Get default prompt for a language model."""


class ConditionalPromptSelector(BasePromptSelector):
    """Prompt collection that goes through conditionals."""

    default_prompt: BasePromptTemplate
    conditionals: list[
        tuple[Callable[[BaseLanguageModel], bool], BasePromptTemplate]
    ] = Field(default_factory=list)

    def get_prompt(self, llm: BaseLanguageModel) -> BasePromptTemplate:
        """Get default prompt for a language model.

        Args:
            llm: Language model to get prompt for.

        Returns:
            Prompt to use for the language model.
        """
        for condition, prompt in self.conditionals:
            if condition(llm):
                return prompt
        return self.default_prompt


def is_llm(llm: BaseLanguageModel) -> bool:
    """Check if the language model is a LLM.

    Args:
        llm: Language model to check.

    Returns:
        True if the language model is a BaseLLM model, False otherwise.
    """
    return isinstance(llm, BaseLLM)


def is_chat_model(llm: BaseLanguageModel) -> bool:
    """Check if the language model is a chat model.

    Args:
        llm: Language model to check.

    Returns:
        True if the language model is a BaseChatModel model, False otherwise.
    """
    return isinstance(llm, BaseChatModel)