
"""Wrapper around Anyscale Endpoint"""

from typing import Any, Dict, List, Mapping, Optional, Set

from langchain_core.callbacks import (
    AsyncCallbackManagerForLLMRun,
    CallbackManagerForLLMRun,
)
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env, pre_init
from pydantic import Field, SecretStr

from langchain_community.llms.openai import (
    BaseOpenAI,
    acompletion_with_retry,
    completion_with_retry,
)
from langchain_community.utils.openai import is_openai_v1

DEFAULT_BASE_URL = "https://api.endpoints.anyscale.com/v1"
DEFAULT_MODEL = "mistralai/Mixtral-8x7B-Instruct-v0.1"


def update_token_usage(
    keys: Set[str], response: Dict[str, Any], token_usage: Dict[str, Any]
) -> None:
    """Update token usage."""
    _keys_to_use = keys.intersection(response["usage"])
    for _key in _keys_to_use:
        if _key not in token_usage:
            token_usage[_key] = response["usage"][_key]
        else:
            token_usage[_key] += response["usage"][_key]


def create_llm_result(
    choices: Any, prompts: List[str], token_usage: Dict[str, int], model_name: str
) -> LLMResult:
    """Create the LLMResult from the choices and prompts."""
    generations = []
    for i, _ in enumerate(prompts):
        choice = choices[i]
        generations.append(
            [
                Generation(
                    text=choice["message"]["content"],
                    generation_info=dict(
                        finish_reason=choice.get("finish_reason"),
                        logprobs=choice.get("logprobs"),
                    ),
                )
            ]
        )
    llm_output = {"token_usage": token_usage, "model_name": model_name}
    return LLMResult(generations=generations, llm_output=llm_output)


class Anyscale(BaseOpenAI):
    """Anyscale large language models.

    To use, you should have the environment variable ``ANYSCALE_API_KEY`` set with
    your Anyscale Endpoint API key, or pass it as a named parameter to the constructor.
    To use with an Anyscale Private Endpoint, please also set ``ANYSCALE_API_BASE``.

    Example:
        .. code-block:: python

            from langchain.llms import Anyscale
            anyscalellm = Anyscale(anyscale_api_key="ANYSCALE_API_KEY")
            # To leverage Ray for parallel processing
            @ray.remote(num_cpus=1)
            def send_query(llm, text):
                resp = llm.invoke(text)
                return resp
            futures = [send_query.remote(anyscalellm, text) for text in texts]
            results = ray.get(futures)
    """

    anyscale_api_base: str = Field(default=DEFAULT_BASE_URL)
    anyscale_api_key: SecretStr = Field(default=SecretStr(""))
    model_name: str = Field(default=DEFAULT_MODEL)
    prefix_messages: List = Field(default_factory=list)

    @classmethod
    def is_lc_serializable(cls) -> bool:
        return False

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exists in environment."""
        values["anyscale_api_base"] = get_from_dict_or_env(
            values, "anyscale_api_base", "ANYSCALE_API_BASE", default=DEFAULT_BASE_URL
        )
        values["anyscale_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "anyscale_api_key", "ANYSCALE_API_KEY")
        )
        values["model_name"] = get_from_dict_or_env(
            values, "model_name", "MODEL_NAME", default=DEFAULT_MODEL
        )
        try:
            import openai

            if is_openai_v1():
                client_params = {
                    "api_key": values["anyscale_api_key"].get_secret_value(),
                    "base_url": values["anyscale_api_base"],
                }
                if not values.get("client"):
                    values["client"] = openai.OpenAI(**client_params).completions
                if not values.get("async_client"):
                    values["async_client"] = openai.AsyncOpenAI(
                        **client_params
                    ).completions
            else:
                values["openai_api_base"] = values["anyscale_api_base"]
                values["openai_api_key"] = values[
                    "anyscale_api_key"
                ].get_secret_value()
                values["client"] = openai.Completion
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )

        if values["streaming"] and values["n"] > 1:
            raise ValueError("Cannot stream results when n > 1.")
        if values["streaming"] and values["best_of"] > 1:
            raise ValueError("Cannot stream results when best_of > 1.")
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {**{"model_name": self.model_name}, **super()._identifying_params}

    @property
    def _invocation_params(self) -> Dict[str, Any]:
        """Get the parameters used to invoke the model."""
        openai_creds: Dict[str, Any] = {"model": self.model_name}
        if not is_openai_v1():
            openai_creds.update(
                {
                    "api_key": self.anyscale_api_key.get_secret_value(),
                    "api_base": self.anyscale_api_base,
                }
            )
        return {**openai_creds, **super()._invocation_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "Anyscale LLM"

    def _generate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to OpenAI's endpoint with k unique prompts.

        Args:
            prompts: The prompts to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The full LLM output.

        Example:
            .. code-block:: python

                response = openai.generate(["Tell me a joke."])
        """
        params = self._invocation_params
        params = {**params, **kwargs}
        sub_prompts = self.get_sub_prompts(params, prompts, stop)
        choices = []
        # Token usage is accumulated across sub-prompt batches.
        token_usage: Dict[str, int] = {}
        _keys = {"completion_tokens", "prompt_tokens", "total_tokens"}
        system_fingerprint: Optional[str] = None
        for _prompts in sub_prompts:
            if self.streaming:
                if len(_prompts) > 1:
                    raise ValueError("Cannot stream results with multiple prompts.")

                generation: Optional[GenerationChunk] = None
                for chunk in self._stream(_prompts[0], stop, run_manager, **kwargs):
                    if generation is None:
                        generation = chunk
                    else:
                        generation += chunk
                assert generation is not None
                info = generation.generation_info or {}
                choices.append(
                    {
                        "text": generation.text,
                        "finish_reason": info.get("finish_reason"),
                        "logprobs": info.get("logprobs"),
                    }
                )
            else:
                response = completion_with_retry(
                    self, prompt=_prompts[0], run_manager=run_manager, **params
                )
                if not isinstance(response, dict):
                    # The v1 client returns a pydantic model; normalize to a dict.
                    response = response.dict()
                choices.extend(response["choices"])
                update_token_usage(_keys, response, token_usage)
                if not system_fingerprint:
                    system_fingerprint = response.get("system_fingerprint")
        return self.create_llm_result(
            choices, prompts, params, token_usage,
            system_fingerprint=system_fingerprint,
        )

    async def _agenerate(
        self,
        prompts: List[str],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> LLMResult:
        """Call out to OpenAI's endpoint async with k unique prompts."""
        params = self._invocation_params
        params = {**params, **kwargs}
        sub_prompts = self.get_sub_prompts(params, prompts, stop)
        choices = []
        token_usage: Dict[str, int] = {}
        _keys = {"completion_tokens", "prompt_tokens", "total_tokens"}
        system_fingerprint: Optional[str] = None
        for _prompts in sub_prompts:
            if self.streaming:
                if len(_prompts) > 1:
                    raise ValueError("Cannot stream results with multiple prompts.")

                generation: Optional[GenerationChunk] = None
                async for chunk in self._astream(
                    _prompts[0], stop, run_manager, **kwargs
                ):
                    if generation is None:
                        generation = chunk
                    else:
                        generation += chunk
                assert generation is not None
                info = generation.generation_info or {}
                choices.append(
                    {
                        "text": generation.text,
                        "finish_reason": info.get("finish_reason"),
                        "logprobs": info.get("logprobs"),
                    }
                )
            else:
                response = await acompletion_with_retry(
                    self, prompt=_prompts[0], run_manager=run_manager, **params
                )
                if not isinstance(response, dict):
                    response = response.dict()
                choices.extend(response["choices"])
                update_token_usage(_keys, response, token_usage)
        return self.create_llm_result(
            choices, prompts, params, token_usage,
            system_fingerprint=system_fingerprint,
        )
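

# --- Usage sketch (illustrative addition, not part of the upstream module) ---
# A minimal example of how this wrapper is typically exercised, assuming the
# ANYSCALE_API_KEY environment variable is set and the `openai` package is
# installed. The prompt text below is a placeholder.
if __name__ == "__main__":
    llm = Anyscale(model_name=DEFAULT_MODEL)
    print(llm.invoke("Write a haiku about serving LLMs on Anyscale."))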