
    <hy                     T   S r SSKJrJr  SSKrSSKJr  SSKJr  SSK	J
r
  SSKJr  SS	KJr  SS
KJr  SSKJr  SSKJrJrJrJrJrJrJr  SSKJr  \R:                  " \5      r " S S\5      r  " S S\5      r! " S S\5      r" " S S\5      r# " S S\5      r$ " S S\5      r%/ SQr&g)zPyTorch BitNet model.    )CallableOptionalN   )Cache)FlashAttentionKwargs)CausalLMOutputWithPast)ALL_ATTENTION_FUNCTIONS)Unpack)logging   )GemmaMLP)LlamaAttentionLlamaDecoderLayerLlamaForCausalLM
LlamaModelLlamaRMSNormapply_rotary_pos_embeager_attention_forward   )BitNetConfigc                       \ rS rSrSrg)BitNetRMSNorm*    N__name__
__module____qualname____firstlineno____static_attributes__r       a/var/www/html/shao/venv/lib/python3.13/site-packages/transformers/models/bitnet/modular_bitnet.pyr   r   *       r!   r   c                   6   ^  \ rS rSrS\4U 4S jjrS rSrU =r$ )	BitNetMLP.   configc                 j   > [         TU ]  U5        [        UR                  UR                  S9U l        g N)eps)super__init__r   intermediate_sizerms_norm_epsffn_sub_norm)selfr'   	__class__s     r"   r,   BitNetMLP.__init__/   s+     )&*B*BH[H[\r!   c           	          U R                  U R                  U R                  U R                  U5      5      U R	                  U5      -  5      5      nU$ )N)	down_projr/   act_fn	gate_projup_proj)r0   xr4   s      r"   forwardBitNetMLP.forward3   sF    NN4#4#4T[[PQAR5SVZVbVbcdVe5e#fg	r!   )r/   )	r   r   r   r   r   r,   r9   r    __classcell__r1   s   @r"   r%   r%   .   s    ]| ] r!   r%   c                   B  ^  \ rS rSrS\S\4U 4S jjr  SS\R                  S\	\R                  \R                  4   S\
\R                     S\
\   S	\
\R                     S
\\   S\	\R                  \
\R                     \
\	\R                        4   4S jjrSrU =r$ )BitNetAttention8   r'   	layer_idxc                 j   > [         TU ]  X5        [        UR                  UR                  S9U l        g r)   )r+   r,   r   hidden_sizer.   attn_sub_norm)r0   r'   r@   r1   s      r"   r,   BitNetAttention.__init__9   s+    +*6+=+=6CVCVWr!   hidden_statesposition_embeddingsattention_maskpast_key_valuecache_positionkwargsreturnc                 V   UR                   S S n/ UQSPU R                  P7nU R                  U5      R                  U5      R	                  SS5      n	U R                  U5      R                  U5      R	                  SS5      n
U R                  U5      R                  U5      R	                  SS5      nUu  p[        XX5      u  pUb$  XUS.nUR                  XU R                  U5      u  p[        nU R                  R                  S:w  a  [        U R                  R                     nU" U U	U
UU4U R                  (       d  SOU R                  U R                   S.UD6u  nnUR"                  " / UQSP76 R%                  5       nU R'                  U5      nU R)                  U5      nUU4$ )Nr   r   )sincosrI   eagerg        )dropoutscaling)shapehead_dimq_projview	transposek_projv_projr   updater@   r   r'   _attn_implementationr	   trainingattention_dropoutrR   reshape
contiguousrC   o_proj)r0   rE   rF   rG   rH   rI   rJ   input_shapehidden_shapequery_states
key_statesvalue_statesrO   rN   cache_kwargsattention_interfaceattn_outputattn_weightss                     r"   r9   BitNetAttention.forward=   s    $))#2.88b8$--8{{=166|DNNqRST[[/44\BLLQPQR
{{=166|DNNqRST&#7RU#[ %#&nUL'5'<'<ZW[WeWegs't$J(?;;++w6"9$++:Z:Z"[$7	%
  $}}C$2H2HLL	%
 	%
!\ "));;;;FFH((5kk+.L((r!   )rC   )NN)r   r   r   r   r   intr,   torchTensortupler   r   
LongTensorr
   r   r9   r    r;   r<   s   @r"   r>   r>   8   s    X| X X +/59+)||+) #5<<#=>+) !.	+)
 !+) !!1!12+) -.+) 
u||Xell3XeELL>Q5RR	S+) +)r!   r>   c                       \ rS rSrSrg)BitNetDecoderLayerk   r   Nr   r   r!   r"   rq   rq   k   r#   r!   rq   c                       \ rS rSrSrg)BitNetModelo   r   Nr   r   r!   r"   rt   rt   o   r#   r!   rt   c                   >   ^  \ rS rSrS/rSrSrS\4U 4S jjrSr	U =r
$ )BitNetForCausalLMs   zlm_head.weightNrK   c                 $   > [         TU ]  " S0 UD6$ )a  
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Labels for computing the masked language modeling loss. Indices should either be in `[0, transformers.,
    config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
    (masked), the loss is only computed for the tokens with labels in `[0, transformers., config.vocab_size]`.

Example:

```python
>>> from transformers import AutoTokenizer, BitNetForCausalLM

>>> model = BitNetForCausalLM.from_pretrained("microsoft/bitnet-b1.58-2B-4T")
>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/bitnet-b1.58-2B-4T")

>>> prompt = f'<|begin_of_text|>User: Hey, are you conscious? Can you talk to me?<|eot_id|>Assistant: '
>>> inputs = tokenizer(prompt, return_tensors="pt")

>>> # Generate
>>> generate_ids = model.generate(inputs.input_ids, max_length=100)
>>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
"User: Hey, are you conscious? Can you talk to me?Assistant: No, I'm not conscious. I'm an artificial intelligence designed to assist with information and tasks. How can I help you today?"
```r   )r+   r9   )r0   super_kwargsr1   s     r"   r9   BitNetForCausalLM.forwardx   s    4 w...r!   r   )r   r   r   r   _tied_weights_keys_tp_plan_pp_planr   r9   r    r;   r<   s   @r"   rw   rw   s   s*    *+HH/ 
 / /r!   rw   )rw   rt   BitNetPreTrainedModel)'__doc__typingr   r   rl   cache_utilsr   modeling_flash_attention_utilsr   modeling_outputsr   modeling_utilsr	   processing_utilsr
   utilsr   gemma.modeling_gemmar   llama.modeling_llamar   r   r   r   r   r   r   configuration_bitnetr   
get_loggerr   loggerr   r%   r>   rq   rt   rw   __all__r   r!   r"   <module>r      s     %    B 6 5 &  +   / 
		H	%	L 	 0)n 0)f	* 		* 	/( /Dr!   