
import math
from typing import Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache, StaticCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import _flash_attention_forward, flash_attn_supports_top_left_mask
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple, logging
from ...utils.generic import check_model_inputs
from .configuration_diffllama import DiffLlamaConfig


logger = logging.get_logger(__name__)


class DiffLlamaMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed

def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def lambda_init_fn(layer_idx):
    return 0.8 - 0.6 * math.exp(-0.3 * layer_idx)


class DiffLlamaAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: DiffLlamaConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        if layer_idx is None:
            logger.warning_once(
                f"Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommended and will "
                "lead to errors during the forward call if caching is used. Please make sure to provide a `layer_idx` "
                "when creating this class."
            )

        self.attention_dropout = config.attention_dropout
        self.hidden_size = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = getattr(config, "head_dim", self.hidden_size // self.num_heads)
        self.num_key_value_heads = config.num_key_value_heads
        self.num_key_value_groups = self.num_heads // self.num_key_value_heads
        self.max_position_embeddings = config.max_position_embeddings
        self.rope_theta = config.rope_theta
        self.is_causal = True

        self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias)
        self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias)
        self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=config.attention_bias)

        self.lambda_init = lambda_init_fn(layer_idx)
        self.lambda_q1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k1 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_q2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.lambda_k2 = nn.Parameter(torch.normal(0, config.lambda_std_dev, size=(self.head_dim,)))
        self.groupnorm = nn.RMSNorm(2 * self.head_dim, eps=config.rms_norm_eps, elementwise_affine=False)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, target_len, _ = hidden_states.size()
        q_len = target_len

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, target_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, target_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, target_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)
        # Stack the two value-head groups along the last dim and duplicate them over the head dim,
        # so that the two softmax attention maps computed below share the same value content.
        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim)

        if attention_mask is not None:  # no matter the length, we just slice it
            causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
            attn_weights = attn_weights + causal_mask

        # upcast attention to fp32
        attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype)
        attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = torch.matmul(attn_weights, value_states)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
        attn_output = attn_output1 - lambda_full * attn_output2

        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)
        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.reshape(bsz, target_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, attn_weights

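
# The differential-attention step implemented in `DiffLlamaAttention.forward` above can be read
# in isolation. A rough sketch with illustrative shapes, kept as a doctest-style comment so
# nothing runs at import time: the value heads are split into two groups that share content,
# one softmax map is computed per query head, and the second half of the resulting outputs is
# subtracted from the first with the learned, layer-dependent weight `lambda_full`, cancelling
# common-mode attention noise.
#
# >>> round(lambda_init_fn(0), 4), round(lambda_init_fn(10), 4)  # lambda_init grows with depth towards 0.8
# (0.2, 0.7701)
# >>> # attn_weights: (bsz, num_heads, q_len, kv_len); value_states: (bsz, num_heads, kv_len, 2 * head_dim)
# >>> attn_output = torch.matmul(attn_weights, value_states)
# >>> attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)
# >>> attn_output = attn_output1 - lambda_full * attn_output2
#
# The flash-attention subclass below computes the same quantity with two fused
# `_flash_attention_forward` calls (one per value group), since a fused kernel exposes no
# intermediate softmax map to subtract.
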
class DiffLlamaFlashAttention2(DiffLlamaAttention):
    """
    DiffLlama flash attention module. This module inherits from `DiffLlamaAttention` as the weights of the module stays
    untouched. The only required change would be on the forward pass where it needs to correctly call the public API of
    flash attention and deal with padding tokens in case the input contains any of them.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._flash_attn_uses_top_left_mask = flash_attn_supports_top_left_mask()

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        if isinstance(past_key_value, StaticCache):
            raise ValueError(
                "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` "
                "make sure to use `sdpa` in the mean time, and open an issue at "
                "https://github.com/huggingface/transformers"
            )

        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        if position_embeddings is None:
            logger.warning_once(
                "The attention layers in this model are transitioning from computing the RoPE embeddings internally "
                "through `position_ids` (2D tensor with the indexes of the tokens), to using externally computed "
                "`position_embeddings` (Tuple of tensors, containing cos and sin). In v4.46 `position_ids` will be "
                "removed and `position_embeddings` will be mandatory."
            )
            cos, sin = self.rotary_emb(value_states, position_ids)
        else:
            cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        # Flash attention requires the layout [batch_size, sequence_length, num_heads, head_dim]
        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        dropout_rate = self.attention_dropout if self.training else 0.0

        input_dtype = query_states.dtype
        device_type = query_states.device.type if query_states.device.type != "mps" else "cpu"
        if input_dtype == torch.float32:
            if torch.is_autocast_enabled():
                target_dtype = (
                    torch.get_autocast_dtype(device_type)
                    if hasattr(torch, "get_autocast_dtype")
                    else torch.get_autocast_gpu_dtype()
                )
            elif hasattr(self.config, "_pre_quantization_dtype"):
                target_dtype = self.config._pre_quantization_dtype
            else:
                target_dtype = self.q_proj.weight.dtype

            logger.warning_once(
                f"The input hidden states seems to be silently casted in float32, this might be related to the fact "
                f"you have upcasted embedding or layer norm layers in float32. We will cast back the input in "
                f"{target_dtype}."
            )

            query_states = query_states.to(target_dtype)
            key_states = key_states.to(target_dtype)
            value_states = value_states.to(target_dtype)

        value_states1, value_states2 = torch.chunk(value_states, 2, dim=2)
        value_states1 = value_states1.repeat(1, 1, 2, 1)
        value_states2 = value_states2.repeat(1, 1, 2, 1)

        attn_output1 = _flash_attention_forward(
            query_states,
            key_states,
            value_states1,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output2 = _flash_attention_forward(
            query_states,
            key_states,
            value_states2,
            attention_mask,
            q_len,
            position_ids=position_ids,
            dropout=dropout_rate,
            sliding_window=getattr(self, "sliding_window", None),
            use_top_left_mask=self._flash_attn_uses_top_left_mask,
            is_causal=self.is_causal,
        )

        attn_output = torch.cat([attn_output1, attn_output2], dim=-1)
        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=2)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.reshape(bsz, q_len, -1).contiguous()
        attn_output = self.o_proj(attn_output)

        return attn_output, None


class DiffLlamaSdpaAttention(DiffLlamaAttention):
    """
    DiffLlama attention module using torch.nn.functional.scaled_dot_product_attention. This module inherits from
    `DiffLlamaAttention` as the weights of the module stays untouched. The only changes are on the forward pass to adapt to
    SDPA API.
    """

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: bool = False,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        bsz, q_len, _ = hidden_states.size()

        query_states = self.q_proj(hidden_states)
        key_states = self.k_proj(hidden_states)
        value_states = self.v_proj(hidden_states)

        query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
        key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)
        value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        key_states = repeat_kv(key_states, self.num_key_value_groups)
        value_states = repeat_kv(value_states, self.num_key_value_groups)
        value_states = torch.cat(torch.chunk(value_states, 2, dim=1), dim=-1)
        value_states = value_states.repeat(1, 2, 1, 1)

        causal_mask = attention_mask
        if attention_mask is not None:
            causal_mask = causal_mask[:, :, :, : key_states.shape[-2]]

        # SDPA's memory-efficient backend can misbehave with non-contiguous inputs and a custom
        # attn_mask on CUDA, so the inputs are made contiguous first.
        if query_states.device.type == "cuda" and causal_mask is not None:
            query_states = query_states.contiguous()
            key_states = key_states.contiguous()
            value_states = value_states.contiguous()

        is_causal = causal_mask is None and q_len > 1

        attn_output = torch.nn.functional.scaled_dot_product_attention(
            query_states,
            key_states,
            value_states,
            attn_mask=causal_mask,
            dropout_p=self.attention_dropout if self.training else 0.0,
            is_causal=is_causal,
        )

        attn_output1, attn_output2 = torch.chunk(attn_output, 2, dim=1)

        lambda_1 = torch.exp(torch.sum(self.lambda_q1 * self.lambda_k1, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_2 = torch.exp(torch.sum(self.lambda_q2 * self.lambda_k2, dim=-1, dtype=torch.float32)).to(
            query_states.dtype
        )
        lambda_full = lambda_1 - lambda_2 + self.lambda_init
        attn_output = attn_output1 - lambda_full * attn_output2
        attn_output = (1 - self.lambda_init) * self.groupnorm(attn_output)

        attn_output = attn_output.transpose(1, 2).contiguous()
        attn_output = attn_output.view(bsz, q_len, -1)

        attn_output = self.o_proj(attn_output)

        return attn_output, None

@use_kernel_forward_from_hub("RMSNorm")
class DiffLlamaRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        DiffLlamaRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


DIFFLLAMA_ATTENTION_CLASSES = {
    "eager": DiffLlamaAttention,
    "flash_attention_2": DiffLlamaFlashAttention2,
    "sdpa": DiffLlamaSdpaAttention,
}


class DiffLlamaDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: DiffLlamaConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size

        self.self_attn = DIFFLLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx)

        self.mlp = DiffLlamaMLP(config)
        self.input_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states

        return hidden_states


@auto_docstring
class DiffLlamaPreTrainedModel(PreTrainedModel):
    config: DiffLlamaConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["DiffLlamaDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = False

    _can_compile_fullgraph = False
    _supports_attention_backend = False
    _can_record_outputs = {
        "hidden_states": DiffLlamaDecoderLayer,
        "attentions": DiffLlamaAttention,
    }

    def _init_weights(self, module):
        super()._init_weights(module)
        if isinstance(module, DiffLlamaAttention):
            module.lambda_q1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k1.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_q2.data.normal_(0, self.config.lambda_std_dev)
            module.lambda_k2.data.normal_(0, self.config.lambda_std_dev)


class DiffLlamaRotaryEmbedding(nn.Module):
    def __init__(self, config: DiffLlamaConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class DiffLlamaModel(DiffLlamaPreTrainedModel):
    def __init__(self, config: DiffLlamaConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [DiffLlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = DiffLlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = DiffLlamaRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class DiffLlamaForCausalLM(DiffLlamaPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = DiffLlamaModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, DiffLlamaForCausalLM

        >>> model = DiffLlamaForCausalLM.from_pretrained("google/diffllama-7b")
        >>> tokenizer = AutoTokenizer.from_pretrained("google/diffllama-7b")

        >>> prompt = "What is your favorite condiment?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "What is your favorite condiment?"
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class DiffLlamaForSequenceClassification(GenericForSequenceClassification, DiffLlamaPreTrainedModel):
    pass


class DiffLlamaForQuestionAnswering(GenericForQuestionAnswering, DiffLlamaPreTrainedModel):
    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`


class DiffLlamaForTokenClassification(GenericForTokenClassification, DiffLlamaPreTrainedModel):
    pass


__all__ = [
    "DiffLlamaPreTrainedModel",
    "DiffLlamaModel",
    "DiffLlamaForCausalLM",
    "DiffLlamaForSequenceClassification",
    "DiffLlamaForQuestionAnswering",
    "DiffLlamaForTokenClassification",
]
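
# A hedged end-to-end smoke test, left entirely in comments so that importing this module stays
# side-effect free. It assumes `DiffLlamaConfig` exposes the usual Llama-style fields
# (vocab_size, hidden_size, num_attention_heads, ...); the tiny sizes below are arbitrary.
#
# >>> from transformers import DiffLlamaConfig, DiffLlamaForCausalLM
# >>> config = DiffLlamaConfig(
# ...     vocab_size=128, hidden_size=64, intermediate_size=128,
# ...     num_hidden_layers=2, num_attention_heads=4, num_key_value_heads=2,
# ... )
# >>> model = DiffLlamaForCausalLM(config)
# >>> out = model(input_ids=torch.randint(0, 128, (1, 8)))
# >>> out.logits.shape
# torch.Size([1, 8, 128])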