
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import check_model_inputs
from .configuration_bitnet import BitNetConfig


@use_kernel_forward_from_hub("RMSNorm")
class BitNetRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        BitNetRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class BitNetMLP(nn.Module):
    def __init__(self, config: BitNetConfig):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]
        # BitNet-specific sub-norm applied before the down projection
        self.ffn_sub_norm = BitNetRMSNorm(config.intermediate_size, eps=config.rms_norm_eps)

    def forward(self, x):
        down_proj = self.down_proj(self.ffn_sub_norm(self.act_fn(self.gate_proj(x)) * self.up_proj(x)))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class BitNetAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: BitNetConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        # BitNet-specific sub-norm applied to the attention output before the output projection
        self.attn_sub_norm = BitNetRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_value: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.attn_sub_norm(attn_output)
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class BitNetDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: BitNetConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = BitNetAttention(config=config, layer_idx=layer_idx)
        self.mlp = BitNetMLP(config)
        self.input_layernorm = BitNetRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = BitNetRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        # Self attention block with pre-norm and residual connection
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully connected block with pre-norm and residual connection
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


class BitNetRotaryEmbedding(nn.Module):
    def __init__(self, config: BitNetConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class BitNetPreTrainedModel(PreTrainedModel):
    config: BitNetConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["BitNetDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": BitNetDecoderLayer,
        "attentions": BitNetAttention,
    }


@auto_docstring
class BitNetModel(BitNetPreTrainedModel):
    def __init__(self, config: BitNetConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [BitNetDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = BitNetRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = BitNetRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class BitNetForCausalLM(BitNetPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = None
    _pp_plan = None

    def __init__(self, config):
        super().__init__(config)
        self.model = BitNetModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, BitNetForCausalLM

        >>> model = BitNetForCausalLM.from_pretrained("microsoft/bitnet-b1.58-2B-4T")
        >>> tokenizer = AutoTokenizer.from_pretrained("microsoft/bitnet-b1.58-2B-4T")

        >>> prompt = f'<|begin_of_text|>User: Hey, are you conscious? Can you talk to me?<|eot_id|>Assistant: '
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=100)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "User: Hey, are you conscious? Can you talk to me?Assistant: No, I'm not conscious. I'm an artificial intelligence designed to assist with information and tasks. How can I help you today?"
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute the logits that are actually needed (the last `logits_to_keep` positions)
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["BitNetForCausalLM", "BitNetModel", "BitNetPreTrainedModel"]