
    dh2                    0   S SK Jr  S SKrS SKJrJr  S SKJr  S SKJ	r	J
r
JrJrJrJr  S SKJr  S SKJr  S SKJr  S S	KJr  S S
KJrJrJr  S SKJr  Sr " S S\5      r " S S\5      r " S S\5      r  " S S\5      r! " S S\\5      r" " S S\\"5      r#g)    )annotationsN)ABCabstractmethod)Enum)AnyDictIteratorListMappingOptional)CallbackManagerForLLMRun)LLM)GenerationChunk)pre_init)	BaseModel
ConfigDictField)enforce_stop_tokenszocid1.generativeaiendpointc                  F    \ rS rSr\\SS j5       5       r\SS j5       rSrg)Provider   c                    g N selfs    b/var/www/html/shao/venv/lib/python3.13/site-packages/langchain_community/llms/oci_generative_ai.pystop_sequence_keyProvider.stop_sequence_key   s    (+    c                    g r   r   r   responses     r   completion_response_to_text$Provider.completion_response_to_text   s    ADr    r   Nreturnstrr#   r   r'   r(   )	__name__
__module____qualname____firstlineno__propertyr   r   r$   __static_attributes__r   r    r   r   r      s&    +  +D Dr    r   c                  8    \ rS rSr% SrS\S'   S	S jrS
S jrSrg)CohereProvider   stop_sequencesr(   r   c                2    SSK Jn  UR                  U l        g Nr   models)oci.generative_ai_inferencer7   CohereLlmInferenceRequestllm_inference_requestr   r7   s     r   __init__CohereProvider.__init__   s    6%+%E%E"r    c                \    UR                   R                  R                  S   R                  $ Nr   )datainference_responsegenerated_textstextr"   s     r   r$   *CohereProvider.completion_response_to_text$   s#    }}//??BGGGr    r:   Nr'   Noner)   	r*   r+   r,   r-   r   __annotations__r<   r$   r/   r   r    r   r1   r1      s    -s-F
Hr    r1   c                  8    \ rS rSr% SrS\S'   S	S jrS
S jrSrg)MetaProvider(   stopr(   r   c                2    SSK Jn  UR                  U l        g r5   )r8   r7   LlamaLlmInferenceRequestr:   r;   s     r   r<   MetaProvider.__init__+   s    6%+%D%D"r    c                \    UR                   R                  R                  S   R                  $ r?   )r@   rA   choicesrC   r"   s     r   r$   (MetaProvider.completion_response_to_text0   s#    }}//77:???r    rE   NrF   r)   rH   r   r    r   rK   rK   (   s    #s#E
@r    rK   c                  (    \ rS rSrSrSrSrSrSrSr	g)	OCIAuthType4   z'OCI authentication types as enumerator.            r   N)
r*   r+   r,   r-   __doc__API_KEYSECURITY_TOKENINSTANCE_PRINCIPALRESOURCE_PRINCIPALr/   r   r    r   rU   rU   4   s    1GNr    rU   c                     \ rS rSr% Sr\" SSS9rS\S'   SrS	\S
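

# Illustrative note (added for clarity, not part of the upstream module): the
# ``auth_type`` string on the model classes below is compared against
# ``OCIAuthType(<n>).name``, so the accepted values are exactly the enum
# member names:
#
#     >>> [member.name for member in OCIAuthType]
#     ['API_KEY', 'SECURITY_TOKEN', 'INSTANCE_PRINCIPAL', 'RESOURCE_PRINCIPAL']

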
class OCIGenAIBase(BaseModel, ABC):
    """Base class for OCI GenAI models"""

    client: Any = Field(default=None, exclude=True)
    # Authentication type; one of the OCIAuthType member names.
    auth_type: Optional[str] = "API_KEY"
    # Profile name in the OCI config file.
    auth_profile: Optional[str] = "DEFAULT"
    # Location of the OCI config file.
    auth_file_location: Optional[str] = "~/.oci/config"
    # Id of the model to call, e.g. "cohere.command", or a dedicated endpoint OCID.
    model_id: Optional[str] = None
    # Provider name; if not set, it is derived from the model_id prefix.
    provider: Optional[str] = None
    # Keyword arguments passed through to the inference request.
    model_kwargs: Optional[Dict] = None
    # Service endpoint url.
    service_endpoint: Optional[str] = None
    # OCID of the compartment.
    compartment_id: Optional[str] = None
    # Whether to stream back partial progress.
    is_stream: bool = False

    model_config = ConfigDict(
        extra="forbid", arbitrary_types_allowed=True, protected_namespaces=()
    )

    @pre_init
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that OCI config and python package exists in environment."""
        # Skip building a new client if one was passed in explicitly.
        if values["client"] is not None:
            return values

        try:
            import oci

            client_kwargs = {
                "config": {},
                "signer": None,
                "service_endpoint": values["service_endpoint"],
                "retry_strategy": oci.retry.DEFAULT_RETRY_STRATEGY,
                "timeout": (10, 240),  # (connect, read) timeouts in seconds
            }

            if values["auth_type"] == OCIAuthType(1).name:
                client_kwargs["config"] = oci.config.from_file(
                    file_location=values["auth_file_location"],
                    profile_name=values["auth_profile"],
                )
                client_kwargs.pop("signer", None)
            elif values["auth_type"] == OCIAuthType(2).name:

                def make_security_token_signer(
                    oci_config: dict[str, Any],
                ) -> "oci.auth.signers.SecurityTokenSigner":
                    pk = oci.signer.load_private_key_from_file(
                        oci_config.get("key_file"), None
                    )

                    with open(
                        oci_config.get("security_token_file"), encoding="utf-8"
                    ) as f:
                        st_string = f.read()

                    return oci.auth.signers.SecurityTokenSigner(st_string, pk)

                client_kwargs["config"] = oci.config.from_file(
                    file_location=values["auth_file_location"],
                    profile_name=values["auth_profile"],
                )
                client_kwargs["signer"] = make_security_token_signer(
                    oci_config=client_kwargs["config"]
                )
            elif values["auth_type"] == OCIAuthType(3).name:
                client_kwargs["signer"] = (
                    oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
                )
            elif values["auth_type"] == OCIAuthType(4).name:
                client_kwargs["signer"] = (
                    oci.auth.signers.get_resource_principals_signer()
                )
            else:
                raise ValueError(
                    "Please provide valid value to auth_type, "
                    f"{values['auth_type']} is not valid."
                )

            values["client"] = oci.generative_ai_inference.GenerativeAiInferenceClient(
                **client_kwargs
            )

        except ImportError as ex:
            raise ModuleNotFoundError(
                "Could not import oci python package. "
                "Please make sure you have the oci package installed."
            ) from ex
        except Exception as e:
            raise ValueError(
                """Could not authenticate with OCI client.
                If INSTANCE_PRINCIPAL or RESOURCE_PRINCIPAL is used,
                please check the specified
                auth_profile, auth_file_location and auth_type are valid.""",
                e,
            ) from e

        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        _model_kwargs = self.model_kwargs or {}
        return {
            **{"model_kwargs": _model_kwargs},
        }

    def _get_provider(self, provider_map: Mapping[str, Any]) -> Any:
        if self.provider is not None:
            provider = self.provider
        else:
            if self.model_id is None:
                raise ValueError(
                    "model_id is required to derive the provider, "
                    "please provide the provider explicitly or specify "
                    "the model_id to derive the provider."
                )
            provider = self.model_id.split(".")[0].lower()

        if provider not in provider_map:
            raise ValueError(
                f"Invalid provider derived from model_id: {self.model_id} "
                "Please explicitly pass in the supported provider "
                "when using custom endpoint"
            )
        return provider_map[provider]


class OCIGenAI(LLM, OCIGenAIBase):
    """OCI large language models.

    To authenticate, the OCI client uses the methods described in
    https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm

    The authentication method is passed through auth_type and should be one of:
    API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL

    Make sure you have the required policies (profile/roles) to
    access the OCI Generative AI service.
    If a specific config profile is used, you must pass
    the name of the profile (from ~/.oci/config) through auth_profile.
    If a specific config file location is used, you must pass
    the file location where the profile name configs are present
    through auth_file_location.
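
    For example, a session-token based profile from a non-default config entry
    could be selected as follows (all values below are illustrative placeholders):

    .. code-block:: python

        llm = OCIGenAI(
            model_id="MY_MODEL_ID",
            service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
            compartment_id="MY_OCID",
            auth_type="SECURITY_TOKEN",
            auth_profile="MY_PROFILE",
            auth_file_location="~/.oci/config",
        )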

    To use, you must provide the compartment id
    along with the endpoint url, and model id
    as named parameters to the constructor.

    Example:
        .. code-block:: python

            from langchain_community.llms import OCIGenAI

            llm = OCIGenAI(
                    model_id="MY_MODEL_ID",
                    service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
                    compartment_id="MY_OCID"
                )
    """

    model_config = ConfigDict(
        extra="ignore", arbitrary_types_allowed=True
    )

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "oci_generative_ai_completion"

    @property
    def _provider_map(self) -> Mapping[str, Any]:
        """Get the provider map"""
        return {
            "cohere": CohereProvider(),
            "meta": MetaProvider(),
        }

    @property
    def _provider(self) -> Any:
        """Get the internal provider object"""
        return self._get_provider(provider_map=self._provider_map)

    def _prepare_invocation_object(
        self, prompt: str, stop: Optional[List[str]], kwargs: Dict[str, Any]
    ) -> Dict[str, Any]:
        from oci.generative_ai_inference import models

        _model_kwargs = self.model_kwargs or {}
        if stop is not None:
            # Map the generic stop list onto the provider-specific parameter name.
            _model_kwargs[self._provider.stop_sequence_key] = stop

        if self.model_id is None:
            raise ValueError(
                "model_id is required to call the model, "
                "please provide the model_id."
            )

        # Dedicated endpoints are addressed by OCID; hosted models by model id.
        if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
            serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
        else:
            serving_mode = models.OnDemandServingMode(model_id=self.model_id)

        inference_params = {**_model_kwargs, **kwargs}
        inference_params["prompt"] = prompt
        inference_params["is_stream"] = self.is_stream

        invocation_obj = models.GenerateTextDetails(
            compartment_id=self.compartment_id,
            serving_mode=serving_mode,
            inference_request=self._provider.llm_inference_request(**inference_params),
        )

        return invocation_obj

    def _process_response(self, response: Any, stop: Optional[List[str]]) -> str:
        text = self._provider.completion_response_to_text(response)

        if stop is not None:
            text = enforce_stop_tokens(text, stop)

        return text

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """Call out to OCIGenAI generate endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

               response = llm.invoke("Tell me a joke.")
        """
        if self.is_stream:
            # Streaming mode: accumulate the streamed chunks into one string.
            text = ""
            for chunk in self._stream(prompt, stop, run_manager, **kwargs):
                text += chunk.text
            if stop is not None:
                text = enforce_stop_tokens(text, stop)
            return text

        invocation_obj = self._prepare_invocation_object(prompt, stop, kwargs)
        response = self.client.generate_text(invocation_obj)

        return self._process_response(response, stop)

    def _stream(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> Iterator[GenerationChunk]:
        """Stream OCIGenAI LLM on given prompt.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            An iterator of GenerationChunks.

        Example:
            .. code-block:: python

            response = llm.stream("Tell me a joke.")
        """
        self.is_stream = True
        invocation_obj = self._prepare_invocation_object(prompt, stop, kwargs)
        response = self.client.generate_text(invocation_obj)

        # The streaming endpoint returns server-sent events whose data payload
        # is a JSON document carrying the next piece of generated text.
        for event in response.data.events():
            json_load = json.loads(event.data)
            if "text" in json_load:
                event_data_text = json_load["text"]
            else:
                event_data_text = ""
            chunk = GenerationChunk(text=event_data_text)

            if run_manager:
                run_manager.on_llm_new_token(chunk.text, chunk=chunk)

            yield chunk
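

# --- Illustrative usage sketch (not part of the library module) -------------
# A minimal example of the completion and streaming paths, assuming valid OCI
# credentials, a compartment OCID, and access to the Generative AI service.
# The model id, endpoint, compartment, and model_kwargs values are placeholders.
if __name__ == "__main__":
    llm = OCIGenAI(
        model_id="cohere.command",
        service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
        compartment_id="MY_OCID",
        model_kwargs={"temperature": 0.7, "max_tokens": 200},
    )

    # Single-shot completion.
    print(llm.invoke("Tell me a joke."))

    # Token-by-token streaming (BaseLLM.stream yields plain strings).
    for piece in llm.stream("Tell me a joke."):
        print(piece, end="", flush=True)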