
"""PromptLayer wrapper."""

import datetime
from typing import Any, Dict, List, Optional

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.messages import BaseMessage
from langchain_core.outputs import ChatResult

from langchain_community.chat_models import ChatOpenAI


class PromptLayerChatOpenAI(ChatOpenAI):
    """`PromptLayer` and `OpenAI` Chat large language models API.

    To use, you should have the ``openai`` and ``promptlayer`` python
    packages installed, and the environment variables ``OPENAI_API_KEY``
    and ``PROMPTLAYER_API_KEY`` set with your OpenAI API key and
    PromptLayer API key respectively.

    All parameters that can be passed to the OpenAI LLM can also be passed
    here. The PromptLayerChatOpenAI adds two optional parameters:

        ``pl_tags``: List of strings to tag the request with.
        ``return_pl_id``: If True, the PromptLayer request ID will be
            returned in the ``generation_info`` field of the
            ``Generation`` object.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import PromptLayerChatOpenAI
            openai = PromptLayerChatOpenAI(model="gpt-3.5-turbo")
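
    When ``return_pl_id`` is True, the PromptLayer request ID can be read
    back from each generation. A minimal sketch (the tag name and message
    text below are illustrative, and valid ``OPENAI_API_KEY`` and
    ``PROMPTLAYER_API_KEY`` values are assumed):

        .. code-block:: python

            from langchain_core.messages import HumanMessage

            chat = PromptLayerChatOpenAI(
                model="gpt-3.5-turbo", pl_tags=["my-app"], return_pl_id=True
            )
            result = chat.generate([[HumanMessage(content="Hello!")]])
            # Each generation carries the PromptLayer request ID in its
            # generation_info when return_pl_id=True.
            pl_id = result.generations[0][0].generation_info["pl_request_id"]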
pl_tagsFreturn_pl_idreturnc                     g)NF )clss    j/var/www/html/shao/venv/lib/python3.13/site-packages/langchain_community/chat_models/promptlayer_openai.pyis_lc_serializable(PromptLayerChatOpenAI.is_lc_serializable+   s        messagesstoprun_managerstreamkwargsc                   > SSK JnJn  [        R                  R	                  5       R                  5       n[        TU ]  " XU4SU0UD6n	[        R                  R	                  5       R                  5       n
[        TU ]!  X5      u  p[        U	R                  5       H  u  p[        TU ]!  UR                  /U5      u  p0 UEUEnU" SSUUU R                  UUU
U" 5       U R                  S9
nU R                  (       d  Mc  UR                  b  [        UR                  [         5      (       d  0 Ul        UUR                  S'   M     U	$ )zJCall ChatOpenAI generate and then call PromptLayer API to log the request.r   )get_api_keypromptlayer_api_requestr   zlangchain.PromptLayerChatOpenAI	langchainr   pl_request_id)promptlayer.utilsr   r    datetimenow	timestampsuper	_generate_create_message_dicts	enumerategenerationsmessager   r   generation_info
isinstancedict)selfr   r   r   r   r   r   r    request_start_timegenerated_responsesrequest_end_timemessage_dictsparamsi
generationresponse_dictr#   	__class__s                    r   r)   PromptLayerChatOpenAI._generate/   sO    	K%..224>>@#g/K
06
:@
 $,,002<<> % =h M&':'F'FGMA$)G$A##$d%!M *)&)F31" !..M    --5Z..> > 24J.>K
**?;- H. #"r   c                   >#    SSK JnJn  [        R                  R	                  5       R                  5       n[        TU ]  " XU4SU0UD6I Sh  vN n	[        R                  R	                  5       R                  5       n
[        TU ]!  X5      u  p[        U	R                  5       H  u  p[        TU ]!  UR                  /U5      u  p0 UEUEnU" SSUUU R                  UUU
U" 5       U R                  S9
I Sh  vN nU R                  (       d  Mk  UR                  b  [        UR                  [         5      (       d  0 Ul        UUR                  S'   M     U	$  GN
 Nb7f)	z;Call ChatOpenAI agenerate and then call PromptLayer to log.r   )r   promptlayer_api_request_asyncr   Nz%langchain.PromptLayerChatOpenAI.asyncr!   r"   r#   )r$   r   r=   r%   r&   r'   r(   
_agenerater*   r+   r,   r-   r   r   r.   r/   r0   )r1   r   r   r   r   r   r   r=   r2   r3   r4   r5   r6   r7   r8   r9   r#   r:   s                    r   r>    PromptLayerChatOpenAI._agenerateY   sf     	Q%..224>>@$)G$6K%
06%
:@%
 
 $,,002<<> % =h M&':'F'FGMA$)G$A##$d%!M *)&)F"?7" !..# M    --5Z..> > 24J.>K
**?;- H. #"9
s,   AEEB)E9E:EA	EEc                     g)Nzpromptlayer-openai-chatr   )r1   s    r   	_llm_typePromptLayerChatOpenAI._llm_type   s    (r   c                 N   > 0 [         TU ]  EU R                  U R                  S.E$ )N)r   r   )r(   _identifying_paramsr   r   )r1   r:   s    r   rD   )PromptLayerChatOpenAI._identifying_params   s/    
g)
|| --
 	
r   r   )NNN)__name__
__module____qualname____firstlineno____doc__r   r   str__annotations__r   boolclassmethodr   r	   r   r   r
   r)   r   r>   propertyrA   r   rD   __static_attributes____classcell__)r:   s   @r   r   r      sN   . d3i  #(L(4.(4   %):>!%(#{#(# tCy!(# 67	(#
 (# (# 
(# (#Z %)?C!%(#{#(# tCy!(# ;<	(#
 (# (# 
(# (#T )3 ) ) 
T#s(^ 
 
r   r   )rJ   r%   typingr   r   r   r   langchain_core.callbacksr   r   langchain_core.messagesr	   langchain_core.outputsr
   langchain_community.chat_modelsr   r   r   r   r   <module>rW      s/      , , 0 - 6}
J }
r   