
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...cache_utils import Cache, DynamicCache
from ...generation import GenerationMixin
from ...masking_utils import create_causal_mask
from ...modeling_flash_attention_utils import FlashAttentionKwargs
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
from ...modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...processing_utils import Unpack
from ...utils import TransformersKwargs, auto_docstring, can_return_tuple
from ...utils.generic import check_model_inputs
from .configuration_cohere import CohereConfig


class CohereLayerNorm(nn.Module):
    def __init__(self, hidden_size=None, eps=1e-5, bias=False):
        """The hidden size can be a tuple or an int. The tuple is used for QKNorm to normalize across head_dim"""
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        input_dtype = hidden_states.dtype
        hidden_states = hidden_states.to(torch.float32)
        mean = hidden_states.mean(-1, keepdim=True)
        variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True)
        hidden_states = (hidden_states - mean) * torch.rsqrt(variance + self.variance_epsilon)
        hidden_states = self.weight.to(torch.float32) * hidden_states
        return hidden_states.to(input_dtype)

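# A minimal sanity-check sketch (an illustration, not part of the module's public
# API; the name and shapes below are assumptions for the demo): unlike `nn.LayerNorm`,
# `CohereLayerNorm` carries no bias term, and normalization is computed in float32
# before casting back to the input dtype.
def _cohere_layer_norm_demo():
    norm = CohereLayerNorm(hidden_size=8)
    x = torch.randn(2, 4, 8).to(torch.float16)
    out = norm(x)
    # The output dtype matches the input dtype, and each feature vector comes
    # out (approximately) zero-mean after the mean-centering step.
    assert out.dtype == torch.float16
    assert torch.allclose(out.float().mean(-1), torch.zeros(2, 4), atol=1e-2)
    return out
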
U =r$ )CohereRotaryEmbeddingC   configc                   > [         TU ]  5         [        US5      (       aZ  [        UR                  [
        5      (       a;  UR                  R                  SUR                  R                  S5      5      U l        OSU l        UR                  U l	        UR                  U l
        Xl        [        U R                     U l        U R                  U R                  U5      u  o0l        U R                  SUSS9  U R                   U l        g )Nrope_scaling	rope_typetypedefaultinv_freqF)
persistent)r    r!   hasattr
isinstancerK   dictgetrL   max_position_embeddingsmax_seq_len_cachedoriginal_max_seq_lenrI   r   rope_init_fnattention_scalingregister_bufferrO   original_inv_freq)r'   rI   devicerO   r+   s       r,   r!   CohereRotaryEmbedding.__init__D   s    6>**z&:M:Mt/T/T#0044[&BUBUBYBYZ`BabDN&DN"("@"@$*$B$B!/?+/+<+<T[[&+Q((ZeD!%r.   c                 0   U R                   S S S 2S 4   R                  5       R                  UR                  S   SS5      nUS S 2S S S 24   R                  5       n[	        UR
                  R                  [        5      (       a0  UR
                  R                  S:w  a  UR
                  R                  OSn[        R                  " USS9   UR                  5       UR                  5       -  R                  SS5      n[        R                  " USSS	9nUR                  5       U R                  -  nUR                  5       U R                  -  n	S S S 5        WR                  UR                   S
9W	R                  UR                   S
94$ ! , (       d  f       N@= f)Nr   r0   r   mpscpuF)device_typeenabledr2   dimr3   )rO   floatexpandshaperR   r\   rM   strr#   autocast	transposerepeat_interleavecosrY   sinr4   r3   )
r'   xposition_idsinv_freq_expandedposition_ids_expandedra   freqsembrm   rn   s
             r,   r<   CohereRotaryEmbedding.forwardU   sB    !MM$4-8>>@GGHZHZ[\H]_acde ,QaZ 8 > > @'1!((--'E'E!((--[`J`ahhmmfk^^UC&,,.1F1L1L1NNYYZ[]^_E))%;C'')d444C'')d444C	 D vvAGGv$cff177f&;;; DCs   BF
F)rY   rI   rV   r[   rW   rX   rL   N)r?   r@   rA   rB   r   r!   r#   no_gradr   r<   rC   rD   rE   s   @r,   rG   rG   C   s6    /| / /" ]]_<  <r.   rG   c                   .   ^  \ rS rSrU 4S jrS rSrU =r$ )	CohereMLPe   c                   > [         TU ]  5         Xl        UR                  U l        UR                  U l        [
        R                  " U R                  U R                  SS9U l        [
        R                  " U R                  U R                  SS9U l        [
        R                  " U R                  U R                  SS9U l	        [        UR                     U l        g NFr*   )r    r!   rI   r(   intermediate_sizer   Linear	gate_projup_proj	down_projr   
hidden_actact_fnr'   rI   r+   s     r,   r!   CohereMLP.__init__f   s    !--!'!9!94#3#3T5K5KRWXyy!1!143I3IPUV4#9#94;K;KRWXV../r.   c                     U R                  U R                  U R                  U5      5      U R                  U5      -  5      nU$ rv   )r   r   r   r   )r'   ro   r   s      r,   r<   CohereMLP.forwardp   s6    NN4;;t~~a/@#ADLLQRO#ST	r.   )r   rI   r   r   r(   r~   r   r>   rE   s   @r,   ry   ry   e   s    0 r.   ry   r9   n_repreturnc                     U R                   u  p#pEUS:X  a  U $ U SS2SS2SSS2SS24   R                  X#XU5      n U R                  X#U-  XE5      $ )z
def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
    """
    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
    """
    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
    if n_rep == 1:
        return hidden_states
    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)

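# A small illustrative sketch (the name and shapes are assumptions for the demo):
# with grouped-query attention, a few key/value heads are shared across many query
# heads, so `repeat_kv` expands the KV tensors by n_rep before the matmul-based
# eager attention path below.
def _repeat_kv_demo():
    batch, num_kv_heads, seqlen, head_dim = 1, 2, 5, 16
    kv = torch.randn(batch, num_kv_heads, seqlen, head_dim)
    expanded = repeat_kv(kv, n_rep=4)
    assert expanded.shape == (batch, num_kv_heads * 4, seqlen, head_dim)
    # Each KV head is repeated consecutively, matching torch.repeat_interleave on dim=1.
    assert torch.equal(expanded, torch.repeat_interleave(kv, repeats=4, dim=1))
    return expanded
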
Ub"  US S 2S S 2S S 2S UR
                  S   24   nX-   n
[        R                  R                  U
S[        R                  S9R                  UR                  5      n
[        R                  R                  XU R                  S9n
[        R                  " X5      nUR	                  SS5      R                  5       nX4$ )Nr2   r   r0   )rd   r3   )ptrainingr   )r   num_key_value_groupsr#   matmulrk   rh   r   
functionalsoftmaxr5   r4   r3   r   r   
contiguous)r   r   r   r   r   r   r   r   
key_statesvalue_statesattn_weightscausal_maskattn_outputs                r,   eager_attention_forwardr      s     3 ; ;<JU$?$?@L<<';';Aq'ABWLL!$Q1.D
0@0@0D.D%DE#1==((2U]](SVVW\WbWbcL==((6??([L,,|:K''1-88:K$$r.   c                 |    U SS S S24   nU SSS S24   n[         R                  " U* U/SS9R                  S5      nU$ )N.r2   r   r0   rc   r   )r#   stackflatten)ro   x1x2rot_xs       r,   rotate_halfr      sL    	
def rotate_half(x):
    # Split and rotate. Note that this function is different from e.g. Llama: it
    # pairs even/odd channels instead of splitting the head dimension in half.
    x1 = x[..., ::2]
    x2 = x[..., 1::2]
    rot_x = torch.stack([-x2, x1], dim=-1).flatten(-2)
    return rot_x


def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
    """Applies Rotary Position Embedding to the query and key tensors.

    Args:
        q (`torch.Tensor`): The query tensor.
        k (`torch.Tensor`): The key tensor.
        cos (`torch.Tensor`): The cosine part of the rotary embedding.
        sin (`torch.Tensor`): The sine part of the rotary embedding.
        position_ids (`torch.Tensor`, *optional*):
            Deprecated and unused.
        unsqueeze_dim (`int`, *optional*, defaults to 1):
            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
    Returns:
        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
    """
    dtype = q.dtype
    q = q.float()
    k = k.float()
    cos = cos.unsqueeze(unsqueeze_dim)
    sin = sin.unsqueeze(unsqueeze_dim)
    q_embed = (q * cos) + (rotate_half(q) * sin)
    k_embed = (k * cos) + (rotate_half(k) * sin)
    return q_embed.to(dtype=dtype), k_embed.to(dtype=dtype)

class CohereAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: CohereConfig, layer_idx: Optional[int] = None):
        super().__init__()
        self.config = config
        self.layer_idx = layer_idx
        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
        self.scaling = self.head_dim**-0.5
        self.attention_dropout = config.attention_dropout
        self.is_causal = True

        self.q_proj = nn.Linear(
            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
        )
        self.k_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.v_proj = nn.Linear(
            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
        )
        self.o_proj = nn.Linear(
            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
        )
        self.use_qk_norm = config.use_qk_norm
        if self.use_qk_norm:
            # QK-norm is applied over (num_heads, head_dim) rather than the flattened
            # hidden size, so the norm weights shard cleanly under tensor parallelism
            self.q_norm = CohereLayerNorm(
                hidden_size=(config.num_attention_heads, self.head_dim), eps=config.layer_norm_eps
            )
            self.k_norm = CohereLayerNorm(
                hidden_size=(config.num_key_value_heads, self.head_dim), eps=config.layer_norm_eps
            )

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_embeddings: tuple[torch.Tensor, torch.Tensor],
        attention_mask: Optional[torch.Tensor],
        past_key_value: Optional[Cache] = None,
        cache_position: Optional[torch.LongTensor] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.Tensor, Optional[torch.Tensor], Optional[tuple[torch.Tensor]]]:
        input_shape = hidden_states.shape[:-1]
        hidden_shape = (*input_shape, -1, self.head_dim)

        query_states = self.q_proj(hidden_states).view(hidden_shape)
        key_states = self.k_proj(hidden_states).view(hidden_shape)
        value_states = self.v_proj(hidden_states).view(hidden_shape)

        if self.use_qk_norm:  # main diff from Llama
            query_states = self.q_norm(query_states)
            key_states = self.k_norm(key_states)

        query_states = query_states.transpose(1, 2)
        key_states = key_states.transpose(1, 2)
        value_states = value_states.transpose(1, 2)

        cos, sin = position_embeddings
        query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin)

        if past_key_value is not None:
            # sin and cos are specific to RoPE models; cache_position needed for the static cache
            cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
            key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            query_states,
            key_states,
            value_states,
            attention_mask,
            dropout=0.0 if not self.training else self.attention_dropout,
            scaling=self.scaling,
            **kwargs,
        )

        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
        attn_output = self.o_proj(attn_output)
        return attn_output, attn_weights


class CohereDecoderLayer(GradientCheckpointingLayer):
    def __init__(self, config: CohereConfig, layer_idx: int):
        super().__init__()
        self.hidden_size = config.hidden_size
        self.self_attn = CohereAttention(config=config, layer_idx=layer_idx)
        self.mlp = CohereMLP(config)
        self.input_layernorm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_value: Optional[Cache] = None,
        use_cache: Optional[bool] = False,
        cache_position: Optional[torch.LongTensor] = None,
        position_embeddings: Optional[tuple[torch.Tensor, torch.Tensor]] = None,
        **kwargs: Unpack[FlashAttentionKwargs],
    ) -> tuple[torch.FloatTensor, Optional[tuple[torch.FloatTensor, torch.FloatTensor]]]:
        """
        Args:
            hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
            attention_mask (`torch.FloatTensor`, *optional*):
                attention mask of size `(batch_size, sequence_length)` if flash attention is used or `(batch_size, 1,
                query_sequence_length, key_sequence_length)` if default attention is used.
            past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
            output_attentions (`bool`, *optional*):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
            use_cache (`bool`, *optional*):
                If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding
                (see `past_key_values`).
            cache_position (`torch.LongTensor` of shape `(sequence_length)`, *optional*):
                Indices depicting the position of the input sequence tokens in the sequence
            position_embeddings (`tuple[torch.FloatTensor, torch.FloatTensor]`, *optional*):
                Tuple containing the cosine and sine positional embeddings of shape `(batch_size, seq_len, head_dim)`,
                with `head_dim` being the embedding dimension of each attention head.
        """
        residual = hidden_states

        hidden_states = self.input_layernorm(hidden_states)

        # Self Attention
        hidden_states_attention, _ = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_value=past_key_value,
            use_cache=use_cache,
            cache_position=cache_position,
            position_embeddings=position_embeddings,
            **kwargs,
        )

        # Fully Connected
        hidden_states_mlp = self.mlp(hidden_states)

        # Add everything together
        hidden_states = residual + hidden_states_attention + hidden_states_mlp

        return hidden_states


@auto_docstring
class CoherePreTrainedModel(PreTrainedModel):
    config: CohereConfig
    base_model_prefix = "model"
    supports_gradient_checkpointing = True
    _no_split_modules = ["CohereDecoderLayer"]
    _skip_keys_device_placement = ["past_key_values"]
    _supports_flash_attn = True
    _supports_sdpa = True
    _supports_flex_attn = True
    _can_compile_fullgraph = True
    _supports_attention_backend = True
    _can_record_outputs = {
        "hidden_states": CohereDecoderLayer,
        "attentions": CohereAttention,
    }


@auto_docstring
class CohereModel(CoherePreTrainedModel):
    def __init__(self, config: CohereConfig):
        super().__init__(config)
        self.padding_idx = config.pad_token_id
        self.vocab_size = config.vocab_size

        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
        self.layers = nn.ModuleList(
            [CohereDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
        )
        self.norm = CohereLayerNorm(hidden_size=config.hidden_size, eps=config.layer_norm_eps)
        self.rotary_emb = CohereRotaryEmbedding(config=config)
        self.gradient_checkpointing = False

        # Initialize weights and apply final processing
        self.post_init()

    @check_model_inputs
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Cache] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        cache_position: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        **kwargs: Unpack[TransformersKwargs],
    ) -> BaseModelOutputWithPast:
        if (input_ids is None) ^ (inputs_embeds is not None):
            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")

        if inputs_embeds is None:
            inputs_embeds = self.embed_tokens(input_ids)

        if use_cache and past_key_values is None:
            past_key_values = DynamicCache()

        if cache_position is None:
            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
            cache_position = torch.arange(
                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
            )

        if position_ids is None:
            position_ids = cache_position.unsqueeze(0)

        causal_mask = create_causal_mask(
            config=self.config,
            input_embeds=inputs_embeds,
            attention_mask=attention_mask,
            cache_position=cache_position,
            past_key_values=past_key_values,
            position_ids=position_ids,
        )

        hidden_states = inputs_embeds

        # create position embeddings to be shared across the decoder layers
        position_embeddings = self.rotary_emb(hidden_states, position_ids)

        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
            hidden_states = decoder_layer(
                hidden_states,
                attention_mask=causal_mask,
                position_ids=position_ids,
                past_key_value=past_key_values,
                cache_position=cache_position,
                position_embeddings=position_embeddings,
                **kwargs,
            )

        hidden_states = self.norm(hidden_states)
        return BaseModelOutputWithPast(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
        )


@auto_docstring
class CohereForCausalLM(CoherePreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.weight"]
    _tp_plan = {"lm_head": "colwise_rep"}
    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}

    def __init__(self, config):
        super().__init__(config)
        self.model = CohereModel(config)
        self.vocab_size = config.vocab_size
        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
        self.logit_scale = config.logit_scale
        self.tie_word_embeddings = config.tie_word_embeddings

        # Initialize weights and apply final processing
        self.post_init()

    def set_decoder(self, decoder):
        self.model = decoder

    def get_decoder(self):
        return self.model

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        past_key_values: Optional[Union[Cache, list[torch.FloatTensor]]] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        cache_position: Optional[torch.LongTensor] = None,
        logits_to_keep: Union[int, torch.Tensor] = 0,
        **kwargs: Unpack[TransformersKwargs],
    ) -> CausalLMOutputWithPast:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.

        Example:

        ```python
        >>> from transformers import AutoTokenizer, CohereForCausalLM

        >>> model = CohereForCausalLM.from_pretrained("CohereForAI/c4ai-command-r-v01")
        >>> tokenizer = AutoTokenizer.from_pretrained("CohereForAI/c4ai-command-r-v01")

        >>> prompt = "Hey, are you conscious? Can you talk to me?"
        >>> inputs = tokenizer(prompt, return_tensors="pt")

        >>> # Generate
        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
        ```"""
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        outputs: BaseModelOutputWithPast = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            position_ids=position_ids,
            past_key_values=past_key_values,
            inputs_embeds=inputs_embeds,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            cache_position=cache_position,
            **kwargs,
        )

        hidden_states = outputs.last_hidden_state
        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.lm_head(hidden_states[:, slice_indices, :])
        logits = logits * self.logit_scale

        loss = None
        if labels is not None:
            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)

        return CausalLMOutputWithPast(
            loss=loss,
            logits=logits,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["CohereForCausalLM", "CohereModel", "CoherePreTrainedModel"]
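
# A minimal end-to-end sketch (toy hyperparameters and randomly initialized
# weights — an illustration, not a pretrained checkpoint), kept behind the
# __main__ guard so importing the module stays side-effect free:
if __name__ == "__main__":
    demo_config = CohereConfig(
        vocab_size=128,
        hidden_size=64,
        intermediate_size=128,
        num_hidden_layers=2,
        num_attention_heads=4,
        num_key_value_heads=2,
        max_position_embeddings=64,
    )
    demo_model = CohereForCausalLM(demo_config)
    demo_input_ids = torch.randint(0, demo_config.vocab_size, (1, 10))
    with torch.no_grad():
        demo_out = demo_model(input_ids=demo_input_ids)
    # One logit vector per input position, scaled by config.logit_scale.
    print(demo_out.logits.shape)  # torch.Size([1, 10, 128])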