
import logging
from typing import Any, Optional

from langchain_core.language_models.llms import LLM

from langchain_community.llms.ipex_llm import IpexLLM

logger = logging.getLogger(__name__)


class BigdlLLM(IpexLLM):
    """Wrapper around the BigdlLLM model

    Example:
        .. code-block:: python

            from langchain_community.llms import BigdlLLM
            llm = BigdlLLM.from_model_id(model_id="THUDM/chatglm-6b")
    """

    @classmethod
    def from_model_id(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        load_in_4bit: bool = True,
        load_in_low_bit: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct object from model_id

        Args:
            model_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder.
            tokenizer_id: Path for the huggingface repo id to be downloaded or
                      the huggingface checkpoint folder which contains the tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import (
                AutoModel,
                AutoModelForCausalLM,
            )
            from transformers import AutoTokenizer, LlamaTokenizer

        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        if load_in_low_bit is not None:
            logger.warning(
                """`load_in_low_bit` option is not supported in BigdlLLM and
                is ignored. For more data types support with `load_in_low_bit`,
                use IpexLLM instead."""
            )

        if not load_in_4bit:
            raise ValueError(
                "BigdlLLM only supports loading in 4-bit mode, "
                "i.e. load_in_4bit = True. "
                "Please install it with `pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        try:
            model = AutoModelForCausalLM.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )
        except Exception:
            model = AutoModel.from_pretrained(
                model_id, load_in_4bit=True, **_model_kwargs
            )

        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )

    @classmethod
    def from_model_id_low_bit(
        cls,
        model_id: str,
        model_kwargs: Optional[dict] = None,
        *,
        tokenizer_id: Optional[str] = None,
        **kwargs: Any,
    ) -> LLM:
        """
        Construct low_bit object from model_id

        Args:

            model_id: Path for the bigdl-llm transformers low-bit model folder.
            tokenizer_id: Path for the huggingface repo id or local model folder
                      which contains the tokenizer.
            model_kwargs: Keyword arguments to pass to the model and tokenizer.
            kwargs: Extra arguments to pass to the model and tokenizer.

        Returns:
            An object of BigdlLLM.
        """
        logger.warning("BigdlLLM was deprecated. Please use IpexLLM instead.")

        try:
            from bigdl.llm.transformers import (
                AutoModel,
                AutoModelForCausalLM,
            )
            from transformers import AutoTokenizer, LlamaTokenizer

        except ImportError:
            raise ImportError(
                "Could not import bigdl-llm or transformers. Please install it with "
                "`pip install --pre --upgrade bigdl-llm[all]`."
            )

        _model_kwargs = model_kwargs or {}
        _tokenizer_id = tokenizer_id or model_id

        try:
            tokenizer = AutoTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)
        except Exception:
            tokenizer = LlamaTokenizer.from_pretrained(_tokenizer_id, **_model_kwargs)

        try:
            model = AutoModelForCausalLM.load_low_bit(model_id, **_model_kwargs)
        except Exception:
            model = AutoModel.load_low_bit(model_id, **_model_kwargs)

        if "trust_remote_code" in _model_kwargs:
            _model_kwargs = {
                k: v for k, v in _model_kwargs.items() if k != "trust_remote_code"
            }

        return cls(
            model_id=model_id,
            model=model,
            tokenizer=tokenizer,
            model_kwargs=_model_kwargs,
            **kwargs,
        )

    @property
    def _llm_type(self) -> str:
        return "bigdl-llm"