
import logging
from typing import Any, List, Mapping, Optional

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class ChatGLM(LLM):
    """ChatGLM LLM service.

    Example:
        .. code-block:: python

            from langchain_community.llms import ChatGLM
            endpoint_url = (
                "http://127.0.0.1:8000"
            )
            ChatGLM_llm = ChatGLM(
                endpoint_url=endpoint_url
            )
    """

    endpoint_url: str = "http://127.0.0.1:8000/"
    """Endpoint URL to use."""
    model_kwargs: Optional[dict] = None
    """Keyword arguments to pass to the model."""
    max_token: int = 20000
    """Max token allowed to pass to the model."""
    temperature: float = 0.1
    """LLM model temperature from 0 to 10."""
    history: List[List] = []
    """History of the conversation."""
    top_p: float = 0.7
    """Top P for nucleus sampling from 0 to 1."""
    with_history: bool = False
    """Whether to use history or not."""

    @property
    def _llm_type(self) -> str:
        return "chat_glm"

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        _model_kwargs = self.model_kwargs or {}
        return {
            **{"endpoint_url": self.endpoint_url},
            **{"model_kwargs": _model_kwargs},
        }

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to a ChatGLM LLM inference endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = chatglm_llm.invoke("Who are you?")
        """
        _model_kwargs = self.model_kwargs or {}

        # HTTP headers for the JSON request.
        headers = {"Content-Type": "application/json"}

        payload = {
            "prompt": prompt,
            "temperature": self.temperature,
            "history": self.history,
            "max_length": self.max_token,
            "top_p": self.top_p,
        }
        payload.update(_model_kwargs)
        payload.update(kwargs)

        logger.debug(f"ChatGLM payload: {payload}")

        # Call the inference endpoint.
        try:
            response = requests.post(self.endpoint_url, headers=headers, json=payload)
        except requests.exceptions.RequestException as e:
            raise ValueError(f"Error raised by inference endpoint: {e}")

        logger.debug(f"ChatGLM response: {response}")

        if response.status_code != 200:
            raise ValueError(f"Failed with response: {response}")

        try:
            parsed_response = response.json()

            # Check that the response content exists.
            if isinstance(parsed_response, dict):
                content_keys = "response"
                if content_keys in parsed_response:
                    text = parsed_response[content_keys]
                else:
                    raise ValueError(f"No content in response : {parsed_response}")
            else:
                raise ValueError(f"Unexpected response type: {parsed_response}")

        except requests.exceptions.JSONDecodeError as e:
            raise ValueError(
                f"Error raised during decoding response from inference endpoint: {e}."
                f"\nResponse: {response.text}"
            )

        if stop is not None:
            text = enforce_stop_tokens(text, stop)
        if self.with_history:
            self.history = self.history + [[None, parsed_response["response"]]]
        return text