
import math
from typing import Optional

import torch
from torch import nn

from ...cache_utils import Cache, StaticCache
from ...modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
from ...utils import logging
from ..gemma.modeling_gemma import GemmaForCausalLM
from ..llama.modeling_llama import (
    LlamaDecoderLayer,
    LlamaForQuestionAnswering,
    LlamaForSequenceClassification,
    LlamaForTokenClassification,
    LlamaModel,
    LlamaPreTrainedModel,
    apply_rotary_pos_emb,
    repeat_kv,
)
from ..mistral.modeling_mistral import MistralMLP
from .configuration_diffllama import DiffLlamaConfig


logger = logging.get_logger(__name__)

_CHECKPOINT_FOR_DOC = "kajuma/DiffLlama-0.3B-handcut"
_CONFIG_FOR_DOC = "DiffLlamaConfig"


class DiffLlamaMLP(MistralMLP):
    pass


def lambda_init_fn(layer_idx):
    # Depth-dependent initialization of the lambda re-weighting scalar.
    return 0.8 - 0.6 * math.exp(-0.3 * layer_idx)


class DiffLlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: DiffLlamaConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        if layer_idx is None:
            logger.warning_once(
                f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will "
                "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` "
                "when creating this class."
            )

        self.attention_dropout = config.attention_dropout
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = getattr(config, "head_dim", self.hidden_size // self.num_heads)
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
        self.max_position_embeddings = config.max_position_embeddings
        self.rope_theta = config.rope_theta
        self.is_causal = True

        self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.attention_bias)

        # Differential attention parameters: a depth-dependent init plus four learnable vectors that are
        # contracted pairwise in the forward pass to produce the scalar lambda.
        self.lambda_init = lambda_init_fn(layer_idx)
        self.lambda_q1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_q2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.groupnorm = nn.RMSNorm(2 * self.head_dim, eps=config.rms_norm_eps, elementwise_affine=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, target_len, _ = hidden_states.size()
        q_len = target_len

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)

        # Pair up the value heads: head j is concatenated with head j + num_heads // 2 along the feature
        # axis, and the paired values are shared by the two attention maps of each head pair.
        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim)

        if attention_mask is not None:  # no matter the length, we just slice it
            causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
            attn_weights = attn_weights + causal_mask

        # upcast attention to fp32
        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
        attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init

        attn_output = torch.matmul(attn_weights, value_states)
        # Subtract the second half of the attention maps from the first half, re-weighted by lambda.
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.reshape(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, attn_weights
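
# Shape sketch of the differential attention computed above (and mirrored by the flash/SDPA subclasses
# below), assuming hidden states of shape [bsz, seq, hidden] and h = num_attention_heads:
#
#   A           = softmax(Q @ K^T / sqrt(head_dim))                              -> [bsz, h, seq, seq]
#   lambda_full = exp(lambda_q1 . lambda_k1) - exp(lambda_q2 . lambda_k2) + lambda_init
#   O_j         = (A_j - lambda_full * A_{j + h/2}) @ [V_j | V_{j + h/2}]        for j < h/2
#   output      = (1 - lambda_init) * RMSNorm(O), reshaped back to [bsz, seq, h * head_dim]
#
# i.e. the heads are grouped in pairs and the second attention map of each pair is subtracted from the
# first before the values are aggregated.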


class DiffLlamaFlashAttention2(DiffLlamaAttention):
    """
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stay
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # flash_attn<2.1 generates a top-left aligned causal mask, while what is needed here is a
        # bottom-right alignment (the default for flash_attn>=2.1). This flag records which one is in use.
        self._flash_attn_uses_top_left_mask = flash_attn_supports_top_left_mask()

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        if isinstance(past_key_value, StaticCache):
            raise ValueError(
                "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` "
                "make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformers"
            )

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            cos, sin = self.rotary_emb(value_states, position_ids)
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # Flash attention expects the layout [batch_size, seq_len, num_heads, head_dim]
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        # In PEFT, the layer norms are usually cast to float32 for training stability, which silently
        # upcasts the hidden states; cast the inputs back to the expected compute dtype if needed.
        input_dtype = query_states.dtype
        device_type = query_states.device.type if query_states.device.type != "mps" else "cpu"
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = (
                    torch.get_autocast_dtype(device_type)
                    if hasattr(torch, "get_autocast_dtype")
                    else torch.get_autocast_gpu_dtype()
                )
            # Handle the case where the model is quantized
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to the fact "
                f"you have upcasted embedding or layer norm layers in float32. We will cast back the input in "
                f"{target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        value_states1, value_states2 = torch.chunk(value_states, 2, dim=2)
        value_states1 = value_states1.repeat(1, 1, 2, 1)
        value_states2 = value_states2.repeat(1, 1, 2, 1)

        attn_output1 = _flash_attention_forward(
            query_states,
            key_states,
            value_states1,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output2 = _flash_attention_forward(
            query_states,
            key_states,
            value_states2,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output = torch.cat([attn_output1, attn_output2], dim=-1)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=2)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init

        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)
        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)

        return attn_output, None
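
# The fused flash kernel cannot attend to a value tensor whose feature axis carries two stacked value
# heads (as the eager and SDPA paths do), so the flash path above runs the kernel once per value half
# and re-concatenates the results before taking the lambda-weighted difference.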


class DiffLlamaSdpaAttention(DiffLlamaAttention):
    """
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stay untouched. The only changes are on the forward pass to adapt
    to the SDPA API.
    """

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position is needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)

        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        causal_mask = attention_mask
        if attention_mask is not None:
            causal_mask = causal_mask[:, :, :, : key_states.shape[-2]]

        # SDPA's memory-efficient backend requires contiguous inputs when a custom attn_mask is used on CUDA.
        if query_states.device.type == "cuda" and causal_mask is not None:
            query_states = query_states.contiguous()
            key_states = key_states.contiguous()
            value_states = value_states.contiguous()

        # Dispatch to SDPA's causal kernels via `is_causal` only when no explicit mask is given and more than
        # one query token is present (an inline conditional would break torch.compile's dynamic shapes).
        is_causal = causal_mask is None and q_len > 1

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            query_states,
            key_states,
            value_states,
            attn_mask=causal_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            is_causal=is_causal,
        )

        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init

        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.view(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        # SDPA does not expose the attention probabilities, so None is returned in their place.
        return attn_output, None


DIFFLLAMA_ATTENTION_CLASSES = {
    "eager": DiffLlamaAttention,
    "flash_attention_2": DiffLlamaFlashAttention2,
    "sdpa": DiffLlamaSdpaAttention,
}


class DiffLlamaDecoderLayer(LlamaDecoderLayer):
    def __init__(self, config: DiffLlamaConfig, layer_idx: int):
        super().__init__(config, layer_idx)
        # Swap the Llama attention for the implementation selected by `config._attn_implementation`.
        self.self_attn = DIFFLLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx)


class DiffLlamaPreTrainedModel(LlamaPreTrainedModel):
    _supports_flex_attn = False
    _supports_attention_backend = False

    def _init_weights(self, module):
        LlamaPreTrainedModel._init_weights(module)
        if isinstance(module, DiffLlamaAttention):
            module.lambda_q1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_q2.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k2.data.normal_(0, self.config.lambda_std_dev)


class DiffLlamaModel(LlamaModel):
    pass


class DiffLlamaForCausalLM(GemmaForCausalLM):
    pass


class DiffLlamaForSequenceClassification(LlamaForSequenceClassification):
    pass


class DiffLlamaForQuestionAnswering(LlamaForQuestionAnswering):
    pass


class DiffLlamaForTokenClassification(LlamaForTokenClassification):
    pass


__all__ = [
    "DiffLlamaPreTrainedModel",
    "DiffLlamaModel",
    "DiffLlamaForCausalLM",
    "DiffLlamaForSequenceClassification",
    "DiffLlamaForQuestionAnswering",
    "DiffLlamaForTokenClassification",
]
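

if __name__ == "__main__":
    # Illustrative usage sketch (not part of the upstream module). It assumes the checkpoint named in
    # `_CHECKPOINT_FOR_DOC` above is available on the Hugging Face Hub and loadable through the Auto
    # classes; swap in any other DiffLlama checkpoint or a locally converted model as needed.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained(_CHECKPOINT_FOR_DOC)
    # `attn_implementation` picks one of the three attention classes defined in this file.
    model = AutoModelForCausalLM.from_pretrained(_CHECKPOINT_FOR_DOC, attn_implementation="eager")

    inputs = tokenizer("Differential attention ", return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=20)
    print(tokenizer.decode(output_ids[0], skip_special_tokens=True))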