
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...integrations import use_kernel_forward_from_hub
from ...masking_utils import create_causal_mask, create_sliding_window_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import (
    GenericForQuestionAnswering,
    GenericForSequenceClassification,
    GenericForTokenClassification,
    GradientCheckpointingLayer,
)
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.deprecation import deprecate_kwarg
from ...utils.generic import check_model_inputs
from .configuration_ministral import MinistralConfig


class MinistralMLP(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.hidden_size = config.hidden_size
        self.intermediate_size = config.intermediate_size
        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
        self.act_fn = ACT2FN[config.hidden_act]

    def forward(self, x):
        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
        return down_proj


def rotate_half(x):
    """Rotates half the hidden dims of the input."""
    x1 = x[..., : x.shape[-1] // 2]
    x2 = x[..., x.shape[-1] // 2 :]
    return torch.cat((-x2, x1), dim=-1)


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed, k_embed


def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs: Unpack[TransformersKwargs],
):
    key_states = repeat_kv(key, module.num_key_value_groups)
    value_states = repeat_kv(value, module.num_key_value_groups)

    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
    if attention_mask is not None:
        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
        attn_weights = attn_weights + causal_mask

    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
    attn_output = torch.matmul(attn_weights, value_states)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights
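
# Grouped-query attention in the eager path above, in brief (explanatory note, not part of
# the upstream source): K/V are projected with `num_key_value_heads` heads and expanded by
# `repeat_kv` with n_rep = num_attention_heads // num_key_value_heads, e.g. 8 KV heads
# serving 32 query heads gives n_rep = 4 and (b, 8, s, d) -> (b, 32, s, d). Scores are
# scaled by `scaling` (head_dim ** -0.5 in MinistralAttention), the additive causal or
# sliding-window mask is applied before softmax, and softmax runs in float32 before the
# result is cast back to the query dtype.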


class MinistralAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: MinistralConfig, layer_idx: int):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True
        self.q_proj = nn.Linear(config.hidden_size, config.num_attention_heads * self.head_dim, bias=False)
        self.k_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=False)
        self.v_proj = nn.Linear(config.hidden_size, config.num_key_value_heads * self.head_dim, bias=False)
        self.o_proj = nn.Linear(config.num_attention_heads * self.head_dim, config.hidden_size, bias=False)
        self.sliding_window = config.sliding_window if config.layer_types[layer_idx] == "sliding_attention" else None

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_values: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        key_states = self.k_proj(hidden_states).view(hidden_shape).transpose(1, 2)
        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_values is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            sliding_window=self.sliding_window,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


@use_kernel_forward_from_hub("RMSNorm")
class MinistralRMSNorm(nn.Module):
    def __init__(self, hidden_size, eps: float = 1e-6) -> None:
        """
        MinistralRMSNorm is equivalent to T5LayerNorm
        """
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        variance = hidden_states.pow(2).mean(-1, keepdim=True)
        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
        return self.weight * hidden_states.to(input_dtype)

    def extra_repr(self):
        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
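
# RMSNorm in one line (added note): y = weight * x / sqrt(mean(x**2, dim=-1) + eps).
# The statistics are computed in float32 and the result is cast back to the input dtype,
# so half-precision activations do not lose accuracy in the variance estimate.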


class MinistralDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: MinistralConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = MinistralAttention(config=config, layer_idx=layer_idx)
        self.mlp = MinistralMLP(config)
        self.input_layernorm = MinistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.post_attention_layernorm = MinistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.attention_type = config.layer_types[layer_idx]

    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> torch.Tensor:
        residual = hidden_states
        hidden_states = self.input_layernorm(hidden_states)
        hidden_states, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )
        hidden_states = residual + hidden_states

        # Fully Connected
        residual = hidden_states
        hidden_states = self.post_attention_layernorm(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states
        return hidden_states


@auto_docstring
class MinistralPreTrainedModel(PreTrainedModel):
    config: MinistralConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["MinistralDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": MinistralDecoderLayer,
        "attentions": MinistralAttention,
    }


class MinistralRotaryEmbedding(nn.Module):
    inv_freq: torch.Tensor

    def __init__(self, config: MinistralConfig, device=None):
        super().__init__()
        # BC: "rope_type" was originally "type"
        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
        else:
            self.rope_type = "default"
        self.max_seq_len_cached = config.max_position_embeddings
        self.original_max_seq_len = config.max_position_embeddings

        self.config = config
        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]

        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
        self.register_buffer("inv_freq", inv_freq, persistent=False)
        self.original_inv_freq = self.inv_freq

    @torch.no_grad()
    @dynamic_rope_update
    def forward(self, x, position_ids):
        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
        position_ids_expanded = position_ids[:, None, :].float()

        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
        with torch.autocast(device_type=device_type, enabled=False):  # force full precision for the trig ops
            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
            emb = torch.cat((freqs, freqs), dim=-1)
            cos = emb.cos() * self.attention_scaling
            sin = emb.sin() * self.attention_scaling

        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)


@auto_docstring
class MinistralModel(MinistralPreTrainedModel):
    def __init__(self, config: MinistralConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [MinistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = MinistralRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
        self.rotary_emb = MinistralRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache(config=self.config)

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        if not isinstance(causal_mask_mapping := attention_mask, dict):
            # Prepare mask arguments
            mask_kwargs = {
                "config": self.config,
                "input_embeds": inputs_embeds,
                "attention_mask": attention_mask,
                "cache_position": cache_position,
                "past_key_values": past_key_values,
            }
            causal_mask_mapping = {
                "full_attention": create_causal_mask(**mask_kwargs),
                "sliding_attention": create_sliding_window_causal_mask(**mask_kwargs),
            }

        hidden_states = inputs_embeds
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask_mapping[decoder_layer.attention_type],
                position_ids=position_ids,
                past_key_values=past_key_values,
                use_cache=use_cache,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values if use_cache else None,
        )


@auto_docstring
class MinistralForCausalLM(MinistralPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = MinistralModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        Example:

        ```python
        >>> from transformers import AutoTokenizer, MinistralForCausalLM

        >>> model = MinistralForCausalLM.from_pretrained("meta-ministral/Ministral-2-7b-hf")
        >>> tokenizer = AutoTokenizer.from_pretrained("meta-ministral/Ministral-2-7b-hf")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class MinistralForSequenceClassification(GenericForSequenceClassification, MinistralPreTrainedModel):
    pass


class MinistralForTokenClassification(GenericForTokenClassification, MinistralPreTrainedModel):
    pass


class MinistralForQuestionAnswering(GenericForQuestionAnswering, MinistralPreTrainedModel):
    base_model_prefix = "transformer"


__all__ = [
    "MinistralForCausalLM",
    "MinistralForQuestionAnswering",
    "MinistralModel",
    "MinistralPreTrainedModel",
    "MinistralForSequenceClassification",
    "MinistralForTokenClassification",
]