"""Wrapper around Perplexity APIs."""

from __future__ import annotations

import logging
from operator import itemgetter
from typing import (
    Any,
    Dict,
    Iterator,
    List,
    Literal,
    Mapping,
    Optional,
    Tuple,
    Type,
    TypeVar,
    Union,
)

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models import LanguageModelInput
from langchain_core.language_models.chat_models import (
    BaseChatModel,
    generate_from_stream,
)
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    BaseMessage,
    BaseMessageChunk,
    ChatMessage,
    ChatMessageChunk,
    FunctionMessageChunk,
    HumanMessage,
    HumanMessageChunk,
    SystemMessage,
    SystemMessageChunk,
    ToolMessageChunk,
)
from langchain_core.messages.ai import UsageMetadata
from langchain_core.output_parsers import JsonOutputParser, PydanticOutputParser
from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
from langchain_core.runnables import Runnable, RunnableMap, RunnablePassthrough
from langchain_core.utils import from_env, get_pydantic_field_names
from langchain_core.utils.pydantic import is_basemodel_subclass
from pydantic import BaseModel, ConfigDict, Field, TypeAdapter, model_validator
from typing_extensions import Self

_BM = TypeVar("_BM", bound=BaseModel)
_DictOrPydanticClass = Union[Dict[str, Any], Type[_BM], Type]
_DictOrPydantic = Union[Dict, _BM]

logger = logging.getLogger(__name__)


def _is_pydantic_class(obj: Any) -> bool:
    return isinstance(obj, type) and is_basemodel_subclass(obj)


def _create_usage_metadata(token_usage: dict) -> UsageMetadata:
    input_tokens = token_usage.get("prompt_tokens", 0)
    output_tokens = token_usage.get("completion_tokens", 0)
    total_tokens = token_usage.get("total_tokens", input_tokens + output_tokens)
    return UsageMetadata(
        input_tokens=input_tokens,
        output_tokens=output_tokens,
        total_tokens=total_tokens,
    )


@deprecated(
    since="0.3.21",
    removal="1.0",
    alternative_import="langchain_perplexity.ChatPerplexity",
)
class ChatPerplexity(BaseChatModel):
    """`Perplexity AI` Chat models API.

Setup:
    To use, you should have the ``openai`` python package installed, and the
    environment variable ``PPLX_API_KEY`` set to your API key.
    Any parameters that are valid to be passed to the openai.create call
    can be passed in, even if not explicitly saved on this class.

    .. code-block:: bash

        pip install openai
        export PPLX_API_KEY=your_api_key
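
        The API key can also be passed directly at construction time; note
        that ``api_key`` is accepted as an alias for the ``pplx_api_key``
        field.

        .. code-block:: python

            # Illustrative only; prefer the PPLX_API_KEY environment variable.
            llm = ChatPerplexity(
                model="llama-3.1-sonar-small-128k-online",
                pplx_api_key="your_api_key",
            )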

    Key init args - completion params:
        model: str
            Name of the model to use, e.g. "llama-3.1-sonar-small-128k-online".
        temperature: float
            Sampling temperature to use. Default is 0.7.
        max_tokens: Optional[int]
            Maximum number of tokens to generate.
        streaming: bool
            Whether to stream the results or not.

    Key init args - client params:
        pplx_api_key: Optional[str]
            API key for PerplexityChat API. Default is None.
        request_timeout: Optional[Union[float, Tuple[float, float]]]
            Timeout for requests to PerplexityChat completion API. Default is None.
        max_retries: int
            Maximum number of retries to make when generating.

    See full list of supported init args and their descriptions in the params section.

    Instantiate:
        .. code-block:: python

            from langchain_community.chat_models import ChatPerplexity

            llm = ChatPerplexity(
                model="llama-3.1-sonar-small-128k-online",
                temperature=0.7,
            )
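
    Instantiate with client params:
        An illustrative sketch combining the client params listed above
        (assumes ``PPLX_API_KEY`` is set in the environment; ``timeout`` is an
        alias for ``request_timeout``):

        .. code-block:: python

            llm = ChatPerplexity(
                model="llama-3.1-sonar-small-128k-online",
                temperature=0.7,
                timeout=30,
                max_retries=2,
            )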

    Invoke:
        .. code-block:: python

            messages = [
                ("system", "You are a chatbot."),
                ("user", "Hello!")
            ]
            llm.invoke(messages)

    Invoke with structured output:
        .. code-block:: python

            from pydantic import BaseModel

            class StructuredOutput(BaseModel):
                role: str
                content: str

            llm.with_structured_output(StructuredOutput)
            llm.invoke(messages)
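
    Invoke with structured output, keeping the raw message:
        An illustrative sketch: with ``include_raw=True`` the resulting
        runnable returns a dict with ``"raw"``, ``"parsed"`` and
        ``"parsing_error"`` keys (see ``with_structured_output`` for details).

        .. code-block:: python

            structured_llm = llm.with_structured_output(
                StructuredOutput, include_raw=True
            )
            result = structured_llm.invoke(messages)
            result["parsed"]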

    Invoke with perplexity-specific params:
        .. code-block:: python

            llm.invoke(messages, extra_body={"search_recency_filter": "week"})

    Stream:
        .. code-block:: python

            for chunk in llm.stream(messages):
                print(chunk.content)
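
    Aggregate streamed chunks:
        An illustrative sketch: message chunks support ``+``, so a stream can
        be folded into a single message as it arrives.

        .. code-block:: python

            full = None
            for chunk in llm.stream(messages):
                full = chunk if full is None else full + chunk
            full.content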

    Token usage:
        .. code-block:: python

            response = llm.invoke(messages)
            response.usage_metadata

    Response metadata:
        .. code-block:: python

            response = llm.invoke(messages)
            response.response_metadata

    """  # noqa: E501

    client: Any = None  #: :meta private:
    model: str = "llama-3.1-sonar-small-128k-online"
    """Model name to use."""
    temperature: float = 0.7
    """What sampling temperature to use."""
    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for the `create` call that are not
    explicitly specified on this class."""
    pplx_api_key: Optional[str] = Field(
        default_factory=from_env("PPLX_API_KEY", default=None), alias="api_key"
    )
    """Perplexity API key. Read from the ``PPLX_API_KEY`` environment variable
    if not provided."""
    request_timeout: Optional[Union[float, Tuple[float, float]]] = Field(
        None, alias="timeout"
    )
    """Timeout for requests to PerplexityChat completion API. Default is None."""
    max_retries: int = 6
    """Maximum number of retries to make when generating."""
    streaming: bool = False
    """Whether to stream the results or not."""
    max_tokens: Optional[int] = None
    """Maximum number of tokens to generate."""

    model_config = ConfigDict(populate_by_name=True)

    @property
    def lc_secrets(self) -> Dict[str, str]:
        return {"pplx_api_key": "PPLX_API_KEY"}

    @model_validator(mode="before")
    @classmethod
    def build_extra(cls, values: Dict[str, Any]) -> Any:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = get_pydantic_field_names(cls)
        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name in extra:
                raise ValueError(f"Found {field_name} supplied twice.")
            if field_name not in all_required_field_names:
                logger.warning(
                    f"WARNING! {field_name} is not a default parameter. "
                    f"{field_name} was transferred to model_kwargs. "
                    f"Please confirm that {field_name} is what you intended."
                )
                extra[field_name] = values.pop(field_name)

        invalid_model_kwargs = all_required_field_names.intersection(extra.keys())
        if invalid_model_kwargs:
            raise ValueError(
                f"Parameters {invalid_model_kwargs} should be specified explicitly. "
                "Instead they were passed in as part of `model_kwargs` parameter."
            )

        values["model_kwargs"] = extra
        return values

    @model_validator(mode="after")
    def validate_environment(self) -> Self:
        """Validate that api key and python package exist in the environment."""
        try:
            import openai
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )
        try:
            self.client = openai.OpenAI(
                api_key=self.pplx_api_key, base_url="https://api.perplexity.ai"
            )
        except AttributeError:
            raise ValueError(
                "`openai` has no `ChatCompletion` attribute, this is likely "
                "due to an old version of the openai package. Try upgrading it "
                "with `pip install --upgrade openai`."
            )
        return self

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling PerplexityChat API."""
        return {
            "max_tokens": self.max_tokens,
            "stream": self.streaming,
            "temperature": self.temperature,
            **self.model_kwargs,
        }

    def _convert_message_to_dict(self, message: BaseMessage) -> Dict[str, Any]:
        if isinstance(message, ChatMessage):
            message_dict = {"role": message.role, "content": message.content}
        elif isinstance(message, SystemMessage):
            message_dict = {"role": "system", "content": message.content}
        elif isinstance(message, HumanMessage):
            message_dict = {"role": "user", "content": message.content}
        elif isinstance(message, AIMessage):
            message_dict = {"role": "assistant", "content": message.content}
        else:
            raise TypeError(f"Got unknown type {message}")
        return message_dict

    def _create_message_dicts(
        self, messages: List[BaseMessage], stop: Optional[List[str]]
    ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]:
        params = dict(self._invocation_params)
        if stop is not None:
            if "stop" in params:
                raise ValueError("`stop` found in both the input and default params.")
            params["stop"] = stop
        message_dicts = [self._convert_message_to_dict(m) for m in messages]
        return message_dicts, params

    def _convert_delta_to_message_chunk(
        self, _dict: Mapping[str, Any], default_class: Type[BaseMessageChunk]
    ) -> BaseMessageChunk:
        role = _dict.get("role")
        content = _dict.get("content") or ""
        additional_kwargs: Dict = {}
        if _dict.get("function_call"):
            function_call = dict(_dict["function_call"])
            if "name" in function_call and function_call["name"] is None:
                function_call["name"] = ""
            additional_kwargs["function_call"] = function_call
        if _dict.get("tool_calls"):
            additional_kwargs["tool_calls"] = _dict["tool_calls"]

        if role == "user" or default_class == HumanMessageChunk:
            return HumanMessageChunk(content=content)
        elif role == "assistant" or default_class == AIMessageChunk:
            return AIMessageChunk(content=content, additional_kwargs=additional_kwargs)
        elif role == "system" or default_class == SystemMessageChunk:
            return SystemMessageChunk(content=content)
        elif role == "function" or default_class == FunctionMessageChunk:
            return FunctionMessageChunk(content=content, name=_dict["name"])
        elif role == "tool" or default_class == ToolMessageChunk:
            return ToolMessageChunk(
                content=content, tool_call_id=_dict["tool_call_id"]
            )
        elif role or default_class == ChatMessageChunk:
            return ChatMessageChunk(content=content, role=role)
        else:
            return default_class(content=content)

    def _stream(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[ChatGenerationChunk]:
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        default_chunk_class = AIMessageChunk
        # `stream` is passed explicitly below, so drop any value coming from the
        # default params to avoid a duplicate keyword argument.
        params.pop("stream", None)
        stream_resp = self.client.chat.completions.create(
            messages=message_dicts, stream=True, **params
        )
        first_chunk = True
        prev_total_usage: Optional[UsageMetadata] = None
        for chunk in stream_resp:
            if not isinstance(chunk, dict):
                chunk = chunk.model_dump()
            # The API reports cumulative usage; convert it to per-chunk deltas.
            if total_usage := chunk.get("usage"):
                lc_total_usage = _create_usage_metadata(total_usage)
                if prev_total_usage:
                    usage_metadata: Optional[UsageMetadata] = UsageMetadata(
                        input_tokens=lc_total_usage["input_tokens"]
                        - prev_total_usage["input_tokens"],
                        output_tokens=lc_total_usage["output_tokens"]
                        - prev_total_usage["output_tokens"],
                        total_tokens=lc_total_usage["total_tokens"]
                        - prev_total_usage["total_tokens"],
                    )
                else:
                    usage_metadata = lc_total_usage
                prev_total_usage = lc_total_usage
            else:
                usage_metadata = None
            if len(chunk["choices"]) == 0:
                continue
            choice = chunk["choices"][0]

            additional_kwargs = {}
            if first_chunk:
                # Citations (and, when present, images and related questions)
                # are only attached to the first chunk of the stream.
                additional_kwargs["citations"] = chunk.get("citations", [])
                for attr in ("images", "related_questions"):
                    if attr in chunk:
                        additional_kwargs[attr] = chunk[attr]

            message_chunk = self._convert_delta_to_message_chunk(
                choice["delta"], default_chunk_class
            )
            if isinstance(message_chunk, AIMessageChunk) and usage_metadata:
                message_chunk.usage_metadata = usage_metadata
            if first_chunk:
                message_chunk.additional_kwargs |= additional_kwargs
                first_chunk = False

            finish_reason = choice.get("finish_reason")
            generation_info = (
                dict(finish_reason=finish_reason)
                if finish_reason is not None
                else None
            )
            default_chunk_class = message_chunk.__class__
            cg_chunk = ChatGenerationChunk(
                message=message_chunk, generation_info=generation_info
            )
            if run_manager:
                run_manager.on_llm_new_token(cg_chunk.text, chunk=cg_chunk)
            yield cg_chunk

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        if self.streaming:
            stream_iter = self._stream(
                messages, stop=stop, run_manager=run_manager, **kwargs
            )
            if stream_iter:
                return generate_from_stream(stream_iter)
        message_dicts, params = self._create_message_dicts(messages, stop)
        params = {**params, **kwargs}
        response = self.client.chat.completions.create(
            messages=message_dicts, **params
        )
        if usage := getattr(response, "usage", None):
            usage_metadata = _create_usage_metadata(usage.model_dump())
        else:
            usage_metadata = None
        additional_kwargs = {"citations": response.citations}
        for attr in ("images", "related_questions"):
            if hasattr(response, attr):
                additional_kwargs[attr] = getattr(response, attr)

        message = AIMessage(
            content=response.choices[0].message.content,
            additional_kwargs=additional_kwargs,
            usage_metadata=usage_metadata,
        )
        return ChatResult(generations=[ChatGeneration(message=message)])

    @property
    def _invocation_params(self) -> Mapping[str, Any]:
        """Get the parameters used to invoke the model."""
        pplx_creds: Dict[str, Any] = {"model": self.model}
        return {**pplx_creds, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "perplexitychat"

    def with_structured_output(
        self,
        schema: Optional[_DictOrPydanticClass] = None,
        *,
        method: Literal["json_schema"] = "json_schema",
        include_raw: bool = False,
        strict: Optional[bool] = None,
        **kwargs: Any,
    ) -> Runnable[LanguageModelInput, _DictOrPydantic]:
        """Model wrapper that returns outputs formatted to match the given schema for Perplexity.
        Currently, Perplexity only supports the "json_schema" method for structured output,
        as per their official documentation: https://docs.perplexity.ai/guides/structured-outputs

        Args:
            schema:
                The output schema. Can be passed in as:

                - a JSON Schema,
                - a TypedDict class,
                - or a Pydantic class

            method: The method for steering model generation, currently only supports:

                - "json_schema": Use the JSON Schema to parse the model output

            include_raw:
                If False then only the parsed structured output is returned. If
                an error occurs during model output parsing it will be raised. If True
                then both the raw model response (a BaseMessage) and the parsed model
                response will be returned. If an error occurs during output parsing it
                will be caught and returned as well. The final output is always a dict
                with keys "raw", "parsed", and "parsing_error".

            kwargs: Additional keyword args aren't supported.

        Returns:
            A Runnable that takes the same inputs as a :class:`langchain_core.language_models.chat.BaseChatModel`.

            | If ``include_raw`` is False and ``schema`` is a Pydantic class, Runnable outputs an instance of ``schema`` (i.e., a Pydantic object). Otherwise, if ``include_raw`` is False then Runnable outputs a dict.

            | If ``include_raw`` is True, then Runnable outputs a dict with keys:

            - "raw": BaseMessage
            - "parsed": None if there was a parsing error, otherwise the type depends on the ``schema`` as described above.
            - "parsing_error": Optional[BaseException]

        """  # noqa: E501
        # Perplexity exposes a single structured-output mechanism, so the
        # OpenAI-style legacy method names are coerced to "json_schema".
        if method in ("function_calling", "json_mode"):
            method = "json_schema"
        if method == "json_schema":
            if schema is None:
                raise ValueError(
                    "schema must be specified when method is 'json_schema'. "
                    "Received None."
                )
            is_pydantic_schema = _is_pydantic_class(schema)
            if is_pydantic_schema and hasattr(schema, "model_json_schema"):
                response_format = schema.model_json_schema()
            elif is_pydantic_schema:
                response_format = schema.schema()
            elif isinstance(schema, dict):
                response_format = schema
            elif type(schema).__name__ == "_TypedDictMeta":
                adapter = TypeAdapter(schema)
                response_format = adapter.json_schema()
            else:
                raise ValueError(
                    "Unsupported schema type. Expected a JSON Schema dict, "
                    f"a TypedDict class, or a Pydantic class. Received: {schema!r}"
                )
            llm = self.bind(
                response_format={
                    "type": "json_schema",
                    "json_schema": {"schema": response_format},
                },
            )
            output_parser = (
                PydanticOutputParser(pydantic_object=schema)
                if is_pydantic_schema
                else JsonOutputParser()
            )
        else:
            raise ValueError(
                "Unrecognized method argument. Expected 'json_schema'. "
                f"Received: '{method}'"
            )

        if include_raw:
            parser_assign = RunnablePassthrough.assign(
                parsed=itemgetter("raw") | output_parser,
                parsing_error=lambda _: None,
            )
            parser_none = RunnablePassthrough.assign(parsed=lambda _: None)
            parser_with_fallback = parser_assign.with_fallbacks(
                [parser_none], exception_key="parsing_error"
            )
            return RunnableMap(raw=llm) | parser_with_fallback
        else:
            return llm | output_parser