
import json
from typing import Any, Dict, List, Mapping, Optional

from langchain_core._api.deprecation import deprecated
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.utils import get_from_dict_or_env, pre_init
from pydantic import ConfigDict

from langchain_community.llms.utils import enforce_stop_tokens

# Maps each supported task to the response key that holds the generated text.
VALID_TASKS_DICT = {
    "translation": "translation_text",
    "summarization": "summary_text",
    "conversational": "generated_text",
    "text-generation": "generated_text",
    "text2text-generation": "generated_text",
}


@deprecated(
    "0.0.21",
    removal="1.0",
    alternative_import="langchain_huggingface.HuggingFaceEndpoint",
)
class HuggingFaceHub(LLM):
    """HuggingFaceHub models.
    ! This class is deprecated, you should use HuggingFaceEndpoint instead.

    To use, you should have the ``huggingface_hub`` python package installed, and the
    environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, or pass
    it as a named parameter to the constructor.

    Supports `text-generation`, `text2text-generation`, `conversational`,
    `translation`, and `summarization`.

    Example:
        .. code-block:: python

            from langchain_community.llms import HuggingFaceHub
            hf = HuggingFaceHub(repo_id="gpt2", huggingfacehub_api_token="my-api-key")
    """
Nclientrepo_idtaskmodel_kwargshuggingfacehub_api_tokenforbid)extravaluesreturnc                 h   [        USS5      n SSKJnJn  US   nU" UUS9nUS   (       d4  U(       d  [	        S5      eU" US	9R                  US
9nUR                  US'   US   [        ;  a'  [	        SUS    S[        R                  5        S35      eXaS'   U$ ! [         a    [        S5      ef = f)z?Validate that api key and python package exists in environment.r   HUGGINGFACEHUB_API_TOKENr   )HfApiInferenceClientr   )modeltokenr   z1Must specify either `repo_id` or `task`, or both.)r(   )r   zGot invalid task z, currently only z are supportedr   zfCould not import huggingface_hub python package. Please install it with `pip install huggingface_hub`.)
r   huggingface_hubr%   r&   
ValueError
model_infopipeline_tagVALID_TASKS_DICTkeysImportError)clsr!   r   r%   r&   r   r   r+   s           `/var/www/html/shao/venv/lib/python3.13/site-packages/langchain_community/llms/huggingface_hub.pyvalidate_environment#HuggingFaceHub.validate_environment?   s     $8.0J$
 	>Y'G$.F &>$K  #)ABMM# N 
 ",!8!8vf~%55 'v'7 8&&6&;&;&=%>nN   &8   	H 	s   B
B B1c                 j    U R                   =(       d    0 n0 U R                  U R                  S.ESU0E$ )zGet the identifying parameters.)r   r   r   )r   r   r   )self_model_kwargss     r1   _identifying_params"HuggingFaceHub._identifying_paramsd   s@     ))/R
,,		:
}-
 	
    c                     g)zReturn type of llm.r)    )r5   s    r1   	_llm_typeHuggingFaceHub._llm_typem   s     !r9   promptstoprun_managerkwargsc                    U R                   =(       d    0 n0 UEUEnU R                  R                  XS.U R                  S9n[        R
                  " UR                  5       5      nSU;   a  [        SUS    35      e[        U R                     n[        U[        5      (       a	  US   U   n	OXx   n	Ub  [        X5      n	U	$ )a&  Call out to HuggingFace Hub's inference endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = hf("Tell me a joke.")
        """
        _model_kwargs = self.model_kwargs or {}
        parameters = {**_model_kwargs, **kwargs}

        response = self.client.post(
            json={"inputs": prompt, "parameters": parameters}, task=self.task
        )
        response = json.loads(response.decode())
        if "error" in response:
            raise ValueError(f"Error raised by inference API: {response['error']}")

        response_key = VALID_TASKS_DICT[self.task]
        if isinstance(response, list):
            text = response[0][response_key]
        else:
            text = response[response_key]
        if stop is not None:
            # Cut the completion off at the first occurrence of a stop token.
            text = enforce_stop_tokens(text, stop)
        return text
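# Usage sketch, kept as comments so importing this module stays side-effect
# free. The values below are placeholders; `model_kwargs` is forwarded verbatim
# as the request's "parameters":
#
#     from langchain_community.llms import HuggingFaceHub
#
#     hf = HuggingFaceHub(
#         repo_id="gpt2",
#         task="text-generation",
#         huggingfacehub_api_token="my-api-key",
#         model_kwargs={"max_new_tokens": 64},
#     )
#     print(hf.invoke("Tell me a joke."))
#
# Since the class is deprecated, the rough equivalent with the replacement
# named in the @deprecated decorator (assuming `langchain-huggingface` is
# installed) would be:
#
#     from langchain_huggingface import HuggingFaceEndpoint
#
#     llm = HuggingFaceEndpoint(
#         repo_id="gpt2",
#         task="text-generation",
#         huggingfacehub_api_token="my-api-key",
#     )
#     print(llm.invoke("Tell me a joke."))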