
from typing import Any, AsyncIterator, Dict, Iterator, List, Optional, cast

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    agenerate_from_stream,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    ChatMessage,
    HumanMessage,
    SystemMessage,
)
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.prompt_values import PromptValue
from pydantic import ConfigDict

from langchain_community.llms.anthropic import _AnthropicCommon


def _convert_one_message_to_text(
    message: BaseMessage,
    human_prompt: str,
    ai_prompt: str,
) -> str:
    content = cast(str, message.content)
    if isinstance(message, ChatMessage):
        message_text = f"\n\n{message.role.capitalize()}: {content}"
    elif isinstance(message, HumanMessage):
        message_text = f"{human_prompt} {content}"
    elif isinstance(message, AIMessage):
        message_text = f"{ai_prompt} {content}"
    elif isinstance(message, SystemMessage):
        message_text = content
    else:
        raise ValueError(f"Got unknown type {message}")
    return message_text


def convert_messages_to_prompt_anthropic(
    messages: List[BaseMessage],
    *,
    human_prompt: str = "\n\nHuman:",
    ai_prompt: str = "\n\nAssistant:",
) -> str:
    """Format a list of messages into a full prompt for the Anthropic model.

    Args:
        messages (List[BaseMessage]): List of BaseMessage to combine.
        human_prompt (str, optional): Human prompt tag. Defaults to "\n\nHuman:".
        ai_prompt (str, optional): AI prompt tag. Defaults to "\n\nAssistant:".

    Returns:
        str: Combined string with necessary human_prompt and ai_prompt tags.
    """
    messages = messages.copy()  # don't mutate the original list
    if not isinstance(messages[-1], AIMessage):
        messages.append(AIMessage(content=""))

    text = "".join(
        _convert_one_message_to_text(message, human_prompt, ai_prompt)
        for message in messages
    )

    # trim off the trailing ' ' that might come from the "Assistant: " tag
    return text.rstrip()


@deprecated(
    since="0.0.28",
    removal="1.0",
    alternative_import="langchain_anthropic.ChatAnthropic",
)
class ChatAnthropic(BaseChatModel, _AnthropicCommon):
    """`Anthropic` chat large language models.

    To use, you should have the ``anthropic`` python package installed, and the
    environment variable ``ANTHROPIC_API_KEY`` set with your API key, or pass
    it as a named parameter to the constructor.

    Example:
        .. code-block:: python

            import anthropic
            from langchain_community.chat_models import ChatAnthropic
            model = ChatAnthropic(model="<model_name>", anthropic_api_key="my-api-key")
    """

    model_config = ConfigDict(
        populate_by_name=True,
        arbitrary_types_allowed=True,
    )

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"anthropic_api_key": "ANTHROPIC_API_KEY"}

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "anthropic-chat"

    @classmethod
    def is_lc_serializable(cls) -> bool:
        """Return whether this model can be serialized by Langchain."""
        return True

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "anthropic"]

    def _convert_messages_to_prompt(self, messages: List[BaseMessage]) -> str:
        """Format a list of messages into a full prompt for the Anthropic model.

        Args:
            messages (List[BaseMessage]): List of BaseMessage to combine.

        Returns:
            str: Combined string with necessary HUMAN_PROMPT and AI_PROMPT tags.
        """
        prompt_params = {}
        if self.HUMAN_PROMPT:
            prompt_params["human_prompt"] = self.HUMAN_PROMPT
        if self.AI_PROMPT:
            prompt_params["ai_prompt"] = self.AI_PROMPT
        return convert_messages_to_prompt_anthropic(messages=messages, **prompt_params)

    def convert_prompt(self, prompt: PromptValue) -> str:
        return self._convert_messages_to_prompt(prompt.to_messages())

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop

        stream_resp = self.client.completions.create(**params, stream=True)
        for data in stream_resp:
            delta = data.completion
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    async def _astream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> AsyncIterator[ChatGenerationChunk]:
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {"prompt": prompt, **self._default_params, **kwargs}
        if stop:
            params["stop_sequences"] = stop

        stream_resp = await self.async_client.completions.create(**params, stream=True)
        async for data in stream_resp:
            delta = data.completion
            chunk = ChatGenerationChunk(message=AIMessageChunk(content=delta))
            if run_manager:
                await run_manager.on_llm_new_token(delta, chunk=chunk)
            yield chunk

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return generate_from_stream(stream_iter)
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {
            "prompt": prompt,
            **self._default_params,
            **kwargs,
        }
        if stop:
            params["stop_sequences"] = stop
        response = self.client.completions.create(**params)
        completion = response.completion
        message = AIMessage(content=completion)
        return ChatResult(generations=[ChatGeneration(message=message)])

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._astream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            return await agenerate_from_stream(stream_iter)
        prompt = self._convert_messages_to_prompt(messages)
        params: Dict[str, Any] = {
            "prompt": prompt,
            **self._default_params,
            **kwargs,
        }
        if stop:
            params["stop_sequences"] = stop
        response = await self.async_client.completions.create(**params)
        completion = response.completion
        message = AIMessage(content=completion)
        return ChatResult(generations=[ChatGeneration(message=message)])

    def get_num_tokens(self, text: str) -> int:
        """Calculate number of tokens."""
        if not self.count_tokens:
            raise NameError("Please ensure the anthropic package is loaded")
        return self.count_tokens(text)