
from typing import Callable, Optional, Union

import torch
from torch import nn

from transformers.utils.generic import check_model_inputs

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from .configuration_mistral import MistralConfig
class MistralMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed
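
# Illustrative sketch (not part of the model code): the shape contract described in the
# `apply_rotary_pos_emb` docstring above, with made-up toy sizes and a hypothetical helper
# name. With queries/keys laid out as [batch, heads, seq_len, head_dim] and cos/sin as
# [batch, seq_len, head_dim], the default `unsqueeze_dim=1` inserts the missing heads axis
# so broadcasting lines up.
def _rope_shape_sketch():
    batch, heads, seq_len, head_dim = 2, 4, 6, 8  # arbitrary toy sizes
    q = torch.randn(batch, heads, seq_len, head_dim)
    k = torch.randn(batch, heads, seq_len, head_dim)
    cos = torch.randn(batch, seq_len, head_dim)
    sin = torch.randn(batch, seq_len, head_dim)
    q_embed, k_embed = apply_rotary_pos_emb(q, k, cos, sin)  # unsqueeze_dim=1 by default
    assert q_embed.shape == q.shape and k_embed.shape == k.shape
    return q_embed, k_embed
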
def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
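
# Illustrative sketch (not part of the model code): grouped-query attention stores only
# `num_key_value_heads` K/V heads, and `repeat_kv` expands them to match the number of
# query heads before the attention matmul. Helper name and sizes below are made up.
def _repeat_kv_sketch():
    batch, num_kv_heads, n_rep, seq_len, head_dim = 2, 2, 4, 5, 8
    kv = torch.randn(batch, num_kv_heads, seq_len, head_dim)
    expanded = repeat_kv(kv, n_rep)
    assert expanded.shape == (batch, num_kv_heads * n_rep, seq_len, head_dim)
    return expanded
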
Ub"  US S 2S S 2S S 2S UR
                  S   24   nX-   n
[        R                  R                  U
S[        R                  S9R                  UR                  5      n
[        R                  R                  XU R                  S9n
[        R                  " X5      nUR	                  SS5      R                  5       nX4$ )NrG   r   rF   )rI   dtype)ptrainingr   )re   num_key_value_groupsrK   matmul	transposerJ   r   
functionalsoftmaxfloat32torp   rl   rr   
contiguous)rf   rg   rh   ri   rj   rk   rl   rm   
key_statesvalue_statesattn_weightscausal_maskattn_outputs                r6   eager_attention_forwardr   `   s     3 ; ;<JU$?$?@L<<';';Aq'ABWLL!$Q1.D
0@0@0D.D%DE#1==((2U]](SVVW\WbWbcL==((6??([L,,|:K''1-88:K$$r8   c                   F  ^  \ rS rSrSrS\S\4U 4S jjr  SS\R                  S\
\R                  \R                  4   S\\R                     S	\\   S
\\R                     S\\   S\
\R                  \\R                     \\
\R                        4   4S jjrSrU =r$ )MistralAttentionz   z=Multi-headed attention from 'Attention Is All You Need' paperr*   	layer_idxc                   > [         TU ]  5         Xl        X l        [	        USS 5      =(       d    UR
                  UR                  -  U l        UR                  UR                  -  U l	        U R                  S-  U l
        UR                  U l        SU l        [        R                  " UR
                  UR                  U R                  -  SS9U l        [        R                  " UR
                  UR                  U R                  -  SS9U l        [        R                  " UR
                  UR                  U R                  -  SS9U l        [        R                  " UR                  U R                  -  UR
                  SS9U l        g )Nrd   g      TFr&   )r(   r)   r*   r   getattrr+   num_attention_headsrd   rb   rs   rk   attention_dropout	is_causalr   r-   q_projk_projv_projo_projr4   r*   r   r5   s      r6   r)   MistralAttention.__init__}   s.   "
D9mV=O=OSYSmSm=m$*$>$>&B\B\$\!}}d*!'!9!9ii 2 2F4N4NQUQ^Q^4^ejkii 2 2F4N4NQUQ^Q^4^ejkii 2 2F4N4NQUQ^Q^4^ejkii : :T]] JFL^L^ejkr8   r[   position_embeddingsrj   past_key_valuecache_positionrm   r]   c           
      `   UR                   S S n/ UQSPU R                  P7nU R                  U5      R                  U5      R	                  SS5      n	U R                  U5      R                  U5      R	                  SS5      n
U R                  U5      R                  U5      R	                  SS5      nUu  p[        XX5      u  pUb$  XUS.nUR                  XU R                  U5      u  p[        nU R                  R                  S:w  a  [        U R                  R                     nU" U U	U
UU4U R                  (       d  SOU R                  U R                   [#        U R                  SS 5      S.UD6u  nnUR$                  " / UQSP76 R'                  5       nU R)                  U5      nUU4$ )	NrF   r   rG   )rU   rT   r   eager        sliding_window)rl   rk   r   )rJ   rd   r   viewru   r   r   rZ   updater   r   r*   _attn_implementationr   rr   r   rk   r   r`   rz   r   )r4   r[   r   rj   r   r   rm   input_shapehidden_shapequery_statesr{   r|   rT   rU   cache_kwargsattention_interfacer   r}   s                     r6   r<   MistralAttention.forward   s    $))#2.88b8$--8{{=166|DNNqRST[[/44\BLLQPQR
{{=166|DNNqRST&#7RU#[ %#&nUL'5'<'<ZW[WeWegs't$J(?;;++w6"9$++:Z:Z"[$7
%
  $}}C$2H2HLL"4;;0@$G
%
 
%
!\ "));;;;FFHkk+.L((r8   )r   r*   rd   r   r   r   rs   r   r   rk   r   )NN)r>   r?   r@   rA   __doc__r    intr)   rK   Tensortupler   r
   
LongTensorr   r   r<   rB   rC   rD   s   @r6   r   r   z   s    Gl} l l& +/59*)||*) #5<<#=>*) !.	*)
 !*) !!1!12*) -.*) 
u||Xell3XeELL>Q5RR	S*) *)r8   r   RMSNormc                   8   ^  \ rS rSrSU 4S jjrS rS rSrU =r$ )MistralRMSNorm   c                    > [         TU ]  5         [        R                  " [        R
                  " U5      5      U l        X l        g)z-
@use_kernel_forward_from_hub("RMSNorm")
class MistralRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps=1e-6):
        """
        MistralRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"


class MistralDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: MistralConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = MistralAttention(config=config, layer_idx=layer_idx)
        self.mlp = MistralMLP(config)
        self.input_layernorm = MistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = MistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        # Self Attention
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class MistralPreTrainedModel(PreTrainedModel):
    config: MistralConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["MistralDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": MistralDecoderLayer,
        "attentions": MistralAttention,
    }


class MistralRotaryEmbedding(nn.Module):
    def __init__(self, config: MistralConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force float32
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class MistralModel(MistralPreTrainedModel):
    def __init__(self, config: MistralConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = MistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = MistralRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        mask_function = create_causal_mask if self.config.sliding_window is None else create_sliding_window_causal_mask
        causal_mask = mask_function(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
        )


@auto_docstring
class MistralForCausalLM(MistralPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = MistralModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, MistralForCausalLM

        >>> model = MistralForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1")
        >>> tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class MistralForTokenClassification(GenericForTokenClassification, MistralPreTrainedModel):
    pass


class MistralForSequenceClassification(GenericForSequenceClassification, MistralPreTrainedModel):
    pass


class MistralForQuestionAnswering(GenericForQuestionAnswering, MistralPreTrainedModel):
    base_model_prefix = "model"


__all__ = [
    "MistralForCausalLM",
    "MistralForQuestionAnswering",
    "MistralModel",
    "MistralPreTrainedModel",
    "MistralForSequenceClassification",
    "MistralForTokenClassification",
]
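

# Illustrative sketch (not part of the library): wiring the classes above together with a
# deliberately tiny, made-up configuration to check tensor shapes end to end. The helper
# name and all sizes are invented for illustration; running it requires the surrounding
# `transformers` package, since this module relies on relative imports.
def _tiny_mistral_forward_sketch():
    config = MistralConfig(
        vocab_size=128,
        hidden_size=64,
        intermediate_size=128,
        num_hidden_layers=2,
        num_attention_heads=4,
        num_key_value_heads=2,
        max_position_embeddings=64,
    )
    model = MistralModel(config)
    input_ids = torch.randint(0, config.vocab_size, (1, 8))
    outputs = model(input_ids=input_ids)
    assert outputs.last_hidden_state.shape == (1, 8, config.hidden_size)
    return outputs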