
"""PyTorch X-MOD model."""

import math
from typing import Optional, Union

import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss

from ...activations import ACT2FN, gelu
from ...cache_utils import Cache, EncoderDecoderCache
from ...generation import GenerationMixin
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import (
    BaseModelOutputWithPastAndCrossAttentions,
    BaseModelOutputWithPoolingAndCrossAttentions,
    CausalLMOutputWithCrossAttentions,
    MaskedLMOutput,
    MultipleChoiceModelOutput,
    QuestionAnsweringModelOutput,
    SequenceClassifierOutput,
    TokenClassifierOutput,
)
from ...modeling_utils import PreTrainedModel
from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import auto_docstring, logging
from .configuration_xmod import XmodConfig


logger = logging.get_logger(__name__)

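# Editorial note on the architecture (descriptive comment, not part of the upstream file):
# X-MOD keeps one small bottleneck adapter per language after every feed-forward block
# (`XmodAdapter`, owned by `XmodOutput`) and routes each sample through the adapter selected
# by `lang_ids`; all other weights are shared across languages. `config.pre_norm` switches
# the residual blocks between post- and pre-layer-norm variants.
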
class XmodEmbeddings(nn.Module):
    """
    Same as BertEmbeddings with a tiny tweak for positional embeddings indexing.
    """

    def __init__(self, config):
        super().__init__()
        self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
        self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)

        # self.LayerNorm is not snake-cased to stick with the original (TensorFlow) variable
        # name so that checkpoints remain loadable.
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
        self.register_buffer(
            "position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)), persistent=False
        )
        self.register_buffer(
            "token_type_ids", torch.zeros(self.position_ids.size(), dtype=torch.long), persistent=False
        )

        self.padding_idx = config.pad_token_id
        self.position_embeddings = nn.Embedding(
            config.max_position_embeddings, config.hidden_size, padding_idx=self.padding_idx
        )

    def forward(
        self, input_ids=None, token_type_ids=None, position_ids=None, inputs_embeds=None, past_key_values_length=0
    ):
        if position_ids is None:
            if input_ids is not None:
                # Create the position ids from the input token ids. Any padded tokens remain padded.
                position_ids = create_position_ids_from_input_ids(input_ids, self.padding_idx, past_key_values_length)
            else:
                position_ids = self.create_position_ids_from_inputs_embeds(inputs_embeds)

        if input_ids is not None:
            input_shape = input_ids.size()
        else:
            input_shape = inputs_embeds.size()[:-1]

        seq_length = input_shape[1]

        # Default token_type_ids to the registered all-zeros buffer, which helps users trace
        # the model without passing token_type_ids explicitly.
        if token_type_ids is None:
            if hasattr(self, "token_type_ids"):
                buffered_token_type_ids = self.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(input_shape[0], seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)

        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)

        embeddings = inputs_embeds + token_type_embeddings
        if self.position_embedding_type == "absolute":
            position_embeddings = self.position_embeddings(position_ids)
            embeddings += position_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings

    def create_position_ids_from_inputs_embeds(self, inputs_embeds):
        """
        We are provided embeddings directly. We cannot infer which are padded so just generate sequential position ids.

        Args:
            inputs_embeds: torch.Tensor

        Returns: torch.Tensor
        """
        input_shape = inputs_embeds.size()[:-1]
        sequence_length = input_shape[1]

        position_ids = torch.arange(
            self.padding_idx + 1, sequence_length + self.padding_idx + 1, dtype=torch.long, device=inputs_embeds.device
        )
        return position_ids.unsqueeze(0).expand(input_shape)

class XmodSelfAttention(nn.Module):
    def __init__(self, config, position_embedding_type=None, layer_idx=None):
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.position_embedding_type = position_embedding_type or getattr(
            config, "position_embedding_type", "absolute"
        )
        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            self.max_position_embeddings = config.max_position_embeddings
            self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)

        self.is_decoder = config.is_decoder
        self.layer_idx = layer_idx

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        batch_size, seq_length, _ = hidden_states.shape
        query_layer = self.query(hidden_states)
        query_layer = query_layer.view(batch_size, -1, self.num_attention_heads, self.attention_head_size).transpose(
            1, 2
        )

        # If this is instantiated as a cross-attention module, the keys and values come from
        # an encoder; the attention mask makes sure the encoder's padding tokens are not attended to.
        is_cross_attention = encoder_hidden_states is not None

        if past_key_value is not None:
            if isinstance(past_key_value, EncoderDecoderCache):
                is_updated = past_key_value.is_updated.get(self.layer_idx)
                if is_cross_attention:
                    # After the first generated id, we can subsequently re-use all keys/values from the cache.
                    curr_past_key_value = past_key_value.cross_attention_cache
                else:
                    curr_past_key_value = past_key_value.self_attention_cache
            else:
                curr_past_key_value = past_key_value

        current_states = encoder_hidden_states if is_cross_attention else hidden_states
        if is_cross_attention and past_key_value is not None and is_updated:
            # Re-use cached cross-attention keys/values.
            key_layer = curr_past_key_value.layers[self.layer_idx].keys
            value_layer = curr_past_key_value.layers[self.layer_idx].values
        else:
            key_layer = self.key(current_states)
            key_layer = key_layer.view(batch_size, -1, self.num_attention_heads, self.attention_head_size).transpose(
                1, 2
            )
            value_layer = self.value(current_states)
            value_layer = value_layer.view(
                batch_size, -1, self.num_attention_heads, self.attention_head_size
            ).transpose(1, 2)

            if past_key_value is not None:
                # Save keys/values to the cache for fast auto-regressive generation.
                cache_position = cache_position if not is_cross_attention else None
                key_layer, value_layer = curr_past_key_value.update(
                    key_layer, value_layer, self.layer_idx, {"cache_position": cache_position}
                )
                # Flag the cross-attention cache of this layer as filled so it is re-used in later calls.
                if is_cross_attention:
                    past_key_value.is_updated[self.layer_idx] = True

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            query_length, key_length = query_layer.shape[2], key_layer.shape[2]
            if past_key_value is not None:
                position_ids_l = torch.tensor(key_length - 1, dtype=torch.long, device=hidden_states.device).view(
                    -1, 1
                )
            else:
                position_ids_l = torch.arange(query_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
            position_ids_r = torch.arange(key_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
            distance = position_ids_l - position_ids_r

            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility

            if self.position_embedding_type == "relative_key":
                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores
            elif self.position_embedding_type == "relative_key_query":
                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key

        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # The attention mask is precomputed for all layers in XmodModel.forward().
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This drops out entire tokens to attend to, which may look unusual but is taken from
        # the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if requested.
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        return context_layer, attention_probs


class XmodSelfOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = hidden_states + input_tensor
        return hidden_states


class XmodAttention(nn.Module):
    def __init__(self, config, position_embedding_type=None, layer_idx=None):
        super().__init__()
        self.self = XmodSelfAttention(config, position_embedding_type=position_embedding_type, layer_idx=layer_idx)
        self.output = XmodSelfOutput(config)
        self.pruned_heads = set()
        self.pre_norm = config.pre_norm

    def prune_heads(self, heads):
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        residual = hidden_states
        if self.pre_norm:
            hidden_states = self.output.LayerNorm(hidden_states)
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            past_key_value,
            output_attentions,
            cache_position,
        )
        attention_output = self.output(self_outputs[0], residual)
        if not self.pre_norm:
            attention_output = self.output.LayerNorm(attention_output)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class XmodIntermediate(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)
        return hidden_states


class XmodAdapter(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.bottleneck_size = config.hidden_size // config.adapter_reduction_factor
        self.dense1 = nn.Linear(config.hidden_size, self.bottleneck_size)
        self.dense2 = nn.Linear(self.bottleneck_size, config.hidden_size)
        if isinstance(config.hidden_act, str):
            self.adapter_act_fn = ACT2FN[config.hidden_act]
        else:
            self.adapter_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense1(hidden_states)
        hidden_states = self.adapter_act_fn(hidden_states)
        hidden_states = self.dense2(hidden_states)
        return hidden_states


class XmodOutput(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.ln_before_adapter = config.ln_before_adapter
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        if config.adapter_layer_norm:
            self.adapter_layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        else:
            self.adapter_layer_norm = None
        self.adapter_reuse_layer_norm = config.adapter_reuse_layer_norm
        self.adapter_modules = nn.ModuleDict({})
        for language in config.languages:
            self.adapter_modules[str(language)] = XmodAdapter(config)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor, lang_ids: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = hidden_states + input_tensor
        hidden_states = self.lang_adapter(lang_ids, hidden_states)
        return hidden_states

    def lang_adapter(self, lang_ids: torch.Tensor, hidden_states: torch.Tensor):
        # Process subsequent samples with the same lang_id in parallel
        lang_ids, lang_lengths = torch.unique_consecutive(lang_ids, return_counts=True)

        if not self.ln_before_adapter:
            residual = hidden_states

        if self.adapter_layer_norm is not None:
            hidden_states = self.adapter_layer_norm(hidden_states)
        elif self.adapter_reuse_layer_norm:
            hidden_states = self.LayerNorm(hidden_states)

        if self.ln_before_adapter:
            residual = hidden_states

        split_hidden_states = torch.split(hidden_states, lang_lengths.tolist(), 0)
        lang_wise_outputs = []
        for i, (lang_id, split_hidden_state) in enumerate(zip(lang_ids, split_hidden_states)):
            lang = list(self.adapter_modules.keys())[int(lang_id.item())]
            lang_wise_outputs.append(self.adapter_modules[lang](split_hidden_state))
        hidden_states = torch.cat(lang_wise_outputs, 0)

        hidden_states = self.dropout(hidden_states)
        hidden_states += residual
        return hidden_states


class XmodLayer(GradientCheckpointingLayer):
    def __init__(self, config, layer_idx=None):
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = XmodAttention(config, layer_idx=layer_idx)
        self.is_decoder = config.is_decoder
        self.add_cross_attention = config.add_cross_attention
        if self.add_cross_attention:
            if not self.is_decoder:
                raise ValueError(f"{self} should be used as a decoder model if cross attention is added")
            self.crossattention = XmodAttention(config, position_embedding_type="absolute", layer_idx=layer_idx)
        self.intermediate = XmodIntermediate(config)
        self.output = XmodOutput(config)
        self.pre_norm = config.pre_norm

    def forward(
        self,
        hidden_states: torch.Tensor,
        lang_ids: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Cache] = None,
        output_attentions: Optional[bool] = False,
        cache_position: Optional[torch.Tensor] = None,
    ) -> tuple[torch.Tensor]:
        self_attention_outputs = self.attention(
            hidden_states,
            attention_mask=attention_mask,
            head_mask=head_mask,
            past_key_value=past_key_value,
            output_attentions=output_attentions,
            cache_position=cache_position,
        )
        attention_output = self_attention_outputs[0]
        outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights

        if self.is_decoder and encoder_hidden_states is not None:
            if not hasattr(self, "crossattention"):
                raise ValueError(
                    f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention"
                    " layers by setting `config.add_cross_attention=True`"
                )
            cross_attention_outputs = self.crossattention(
                attention_output,
                attention_mask=encoder_attention_mask,
                head_mask=head_mask,
                encoder_hidden_states=encoder_hidden_states,
                past_key_value=past_key_value,
                output_attentions=output_attentions,
                cache_position=cache_position,
            )
            attention_output = cross_attention_outputs[0]
            outputs = outputs + cross_attention_outputs[1:]  # add cross attentions if we output attention weights

        residual = attention_output
        if self.pre_norm:
            attention_output = self.output.LayerNorm(attention_output)
        intermediate_output = apply_chunking_to_forward(
            self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
        )
        layer_output = self.output(intermediate_output, residual, lang_ids)
        if not self.pre_norm:
            layer_output = self.output.LayerNorm(layer_output)
        outputs = (layer_output,) + outputs
        return outputs

    def feed_forward_chunk(self, attention_output):
        return self.intermediate(attention_output)


class XmodEncoder(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([XmodLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)])
        self.is_pre_norm = config.pre_norm
        if self.is_pre_norm:
            self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.gradient_checkpointing = False

    def forward(
        self,
        hidden_states: torch.Tensor,
        lang_ids: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = False,
        output_hidden_states: Optional[bool] = False,
        return_dict: Optional[bool] = True,
        cache_position: Optional[torch.Tensor] = None,
    ) -> Union[tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
        if self.gradient_checkpointing and self.training:
            if use_cache:
                logger.warning_once(
                    "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                )
                use_cache = False

        return_legacy_cache = False
        if use_cache and not isinstance(past_key_values, Cache):
            logger.warning_once(
                "Passing a tuple of `past_key_values` is deprecated and will be removed in Transformers v4.58.0. "
                "You should pass an instance of `EncoderDecoderCache` instead, e.g. "
                "`past_key_values=EncoderDecoderCache.from_legacy_cache(past_key_values)`."
            )
            return_legacy_cache = True
            past_key_values = EncoderDecoderCache.from_legacy_cache(past_key_values)

        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None
        all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            layer_outputs = layer_module(
                hidden_states,
                lang_ids,
                attention_mask,
                layer_head_mask,
                encoder_hidden_states,
                encoder_attention_mask,
                past_key_values,
                output_attentions,
                cache_position,
            )

            hidden_states = layer_outputs[0]
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)
                if self.config.add_cross_attention:
                    all_cross_attentions = all_cross_attentions + (layer_outputs[2],)

        if self.is_pre_norm:
            hidden_states = self.LayerNorm(hidden_states)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if return_legacy_cache:
            past_key_values = past_key_values.to_legacy_cache()

        if not return_dict:
            return tuple(
                v
                for v in [
                    hidden_states,
                    past_key_values,
                    all_hidden_states,
                    all_self_attentions,
                    all_cross_attentions,
                ]
                if v is not None
            )
        return BaseModelOutputWithPastAndCrossAttentions(
            last_hidden_state=hidden_states,
            past_key_values=past_key_values,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
            cross_attentions=all_cross_attentions,
        )


class XmodPooler(nn.Module):
    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # We "pool" the model by simply taking the hidden state corresponding to the first token.
        first_token_tensor = hidden_states[:, 0]
        pooled_output = self.dense(first_token_tensor)
        pooled_output = self.activation(pooled_output)
        return pooled_output


@auto_docstring
class XmodPreTrainedModel(PreTrainedModel):
    config: XmodConfig
    base_model_prefix = "roberta"
    supports_gradient_checkpointing = True

    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, nn.Linear):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
        elif isinstance(module, XmodLMHead):
            module.bias.data.zero_()

    def set_default_language(self, language: str):
        """
        Set the default language code for the model. This is used when the language is not specified in the input.

        Args:
            language (`str`): The language code, such as `"en_XX"` or `"de_DE"`.
        """
        if language not in self.config.languages:
            raise ValueError(
                f"{self} does not have an adapter for {language}. Supported languages: {list(self.config.languages)}"
            )
        self.config.default_language = language

    def freeze_embeddings_and_language_adapters(self):
        """
        Freeze the embeddings and language adapters of the model. Usually, this is applied before the model is
        fine-tuned on a downstream task.
        """
        logger.info("Freezing embeddings")
        for parameter in self.roberta.embeddings.parameters():
            parameter.requires_grad = False
        logger.info("Freezing adapters")
        for layer in self.roberta.encoder.layer:
            if layer.output.adapter_layer_norm is not None:
                for parameter in layer.output.adapter_layer_norm.parameters():
                    parameter.requires_grad = False
            for parameter in layer.output.adapter_modules.parameters():
                parameter.requires_grad = False


@auto_docstring(
    custom_intro="""
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in *Attention is
    all you need*_ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones, Aidan N. Gomez, Lukasz
    Kaiser and Illia Polosukhin.

    To behave as an decoder the model needs to be initialized with the `is_decoder` argument of the configuration set
    to `True`. To be used in a Seq2Seq model, the model needs to initialized with both `is_decoder` argument and
    `add_cross_attention` set to `True`; an `encoder_hidden_states` is then expected as an input to the forward pass.

    .. _*Attention is all you need*: https://huggingface.co/papers/1706.03762
    """
)
class XmodModel(XmodPreTrainedModel):
    def __init__(self, config, add_pooling_layer=True):
        r"""
        add_pooling_layer (bool, *optional*, defaults to `True`):
            Whether to add a pooling layer
        """
        super().__init__(config)
        self.config = config

        self.embeddings = XmodEmbeddings(config)
        self.encoder = XmodEncoder(config)

        self.pooler = XmodPooler(config) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self):
        return self.embeddings.word_embeddings

    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value

    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.Tensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.Tensor] = None,
        token_type_ids: Optional[torch.Tensor] = None,
        position_ids: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        inputs_embeds: Optional[torch.Tensor] = None,
        encoder_hidden_states: Optional[torch.Tensor] = None,
        encoder_attention_mask: Optional[torch.Tensor] = None,
        past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.Tensor] = None,
    ) -> Union[tuple[torch.Tensor], BaseModelOutputWithPoolingAndCrossAttentions]:
        r"""
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if self.config.is_decoder:
            use_cache = use_cache if use_cache is not None else self.config.use_cache
        else:
            use_cache = False

        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)
            input_shape = input_ids.size()
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")

        batch_size, seq_length = input_shape
        device = input_ids.device if input_ids is not None else inputs_embeds.device

        past_key_values_length = 0
        if past_key_values is not None:
            past_key_values_length = (
                past_key_values[0][0].shape[2]
                if not isinstance(past_key_values, Cache)
                else past_key_values.get_seq_length()
            )

        if lang_ids is None:
            if self.config.default_language is None:
                raise ValueError("Input language unknown. Please call `XmodPreTrainedModel.set_default_language()`")
            adapter_languages = list(self.encoder.layer[0].output.adapter_modules.keys())
            default_lang_id = adapter_languages.index(self.config.default_language)
            lang_ids = default_lang_id * torch.ones(batch_size, device=device)

        if attention_mask is None:
            attention_mask = torch.ones((batch_size, seq_length + past_key_values_length), device=device)

        if token_type_ids is None:
            if hasattr(self.embeddings, "token_type_ids"):
                buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)

        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves, in which case we just need to make it broadcastable to all heads.
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape)

        # If a 2D or 3D attention mask is provided for the cross-attention, we need to make it
        # broadcastable to [batch_size, num_heads, seq_length, seq_length].
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None

        # Prepare head mask if needed
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )
        encoder_outputs = self.encoder(
            embedding_output,
            lang_ids=lang_ids,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            cache_position=cache_position,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

        if not return_dict:
            return (sequence_output, pooled_output) + encoder_outputs[1:]

        return BaseModelOutputWithPoolingAndCrossAttentions(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )


@auto_docstring(
    custom_intro="""
    X-MOD Model with a `language modeling` head on top for CLM fine-tuning.
    """
)
class XmodForCausalLM(XmodPreTrainedModel, GenerationMixin):
    _tied_weights_keys = ["lm_head.decoder.weight", "lm_head.decoder.bias"]

    def __init__(self, config):
        super().__init__(config)

        if not config.is_decoder:
            logger.warning("If you want to use `XmodLMHeadModel` as a standalone, add `is_decoder=True.`")

        self.roberta = XmodModel(config, add_pooling_layer=False)
        self.lm_head = XmodLMHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.lm_head.decoder

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.decoder = new_embeddings

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        past_key_values: Optional[tuple[tuple[torch.FloatTensor]]] = None,
        use_cache: Optional[bool] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
        cache_position: Optional[torch.Tensor] = None,
        **kwargs,
    ) -> Union[tuple[torch.Tensor], CausalLMOutputWithCrossAttentions]:
        r"""
lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of the language adapters that should be activated for each sample, respectively. Default: the index
    that corresponds to `self.config.default_language`.
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
    `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
    ignored (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`

Example:

```python
>>> from transformers import AutoTokenizer, XmodForCausalLM, AutoConfig
>>> import torch

>>> tokenizer = AutoTokenizer.from_pretrained("FacebookAI/xlm-roberta-base")
>>> config = AutoConfig.from_pretrained("facebook/xmod-base")
>>> config.is_decoder = True
>>> model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
>>> model.set_default_language("en_XX")

>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)

>>> prediction_logits = outputs.logits
        ```
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        if labels is not None:
            use_cache = False

        outputs = self.roberta(
            input_ids,
            lang_ids=lang_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            cache_position=cache_position,
        )
        sequence_output = outputs[0]
        prediction_scores = self.lm_head(sequence_output)

        lm_loss = None
        if labels is not None:
            lm_loss = self.loss_function(
                prediction_scores,
                labels,
                vocab_size=self.config.vocab_size,
                **kwargs,
            )

        if not return_dict:
            output = (prediction_scores,) + outputs[2:]
            return ((lm_loss,) + output) if lm_loss is not None else output

        return CausalLMOutputWithCrossAttentions(
            loss=lm_loss,
            logits=prediction_scores,
            past_key_values=outputs.past_key_values,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
            cross_attentions=outputs.cross_attentions,
        )


@auto_docstring
class XmodForMaskedLM(XmodPreTrainedModel):
    _tied_weights_keys = ["lm_head.decoder.weight", "lm_head.decoder.bias"]

    def __init__(self, config):
        super().__init__(config)

        if config.is_decoder:
            logger.warning(
                "If you want to use `XmodForMaskedLM` make sure `config.is_decoder=False` for "
                "bi-directional self-attention."
            )

        self.roberta = XmodModel(config, add_pooling_layer=False)
        self.lm_head = XmodLMHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.lm_head.decoder

    def set_output_embeddings(self, new_embeddings):
        self.lm_head.decoder = new_embeddings

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], MaskedLMOutput]:
        r"""
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Labels for computing the masked language modeling loss. Indices should be in `[-100, 0, ...,
            config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are ignored (masked), the
            loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.roberta(
            input_ids,
            lang_ids=lang_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        prediction_scores = self.lm_head(sequence_output)

        masked_lm_loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            masked_lm_loss = loss_fct(prediction_scores.view(-1, self.config.vocab_size), labels.view(-1))

        if not return_dict:
            output = (prediction_scores,) + outputs[2:]
            return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output

        return MaskedLMOutput(
            loss=masked_lm_loss,
            logits=prediction_scores,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class XmodLMHead(nn.Module):
    """Roberta Head for masked language modeling."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.layer_norm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

        self.decoder = nn.Linear(config.hidden_size, config.vocab_size)
        self.bias = nn.Parameter(torch.zeros(config.vocab_size))
        self.decoder.bias = self.bias

    def forward(self, features, **kwargs):
        x = self.dense(features)
        x = gelu(x)
        x = self.layer_norm(x)

        # project back to size of vocabulary with bias
        x = self.decoder(x)
        return x

    def _tie_weights(self):
        # Tie the two bias parameters back together if they get disconnected
        # (e.g. on TPU or when the bias is resized).
        if self.decoder.bias.device.type == "meta":
            self.decoder.bias = self.bias
        else:
            self.bias = self.decoder.bias


@auto_docstring(
    custom_intro="""
    X-MOD Model transformer with a sequence classification/regression head on top (a linear layer on top of the pooled
    output) e.g. for GLUE tasks.
    """
)
class XmodForSequenceClassification(XmodPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config

        self.roberta = XmodModel(config, add_pooling_layer=False)
        self.classifier = XmodClassificationHead(config)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], SequenceClassifierOutput]:
        r"""
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.roberta(
            input_ids,
            lang_ids=lang_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = outputs[0]
        logits = self.classifier(sequence_output)

        loss = None
        if labels is not None:
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


@auto_docstring
class XmodForMultipleChoice(XmodPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)

        self.roberta = XmodModel(config)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.classifier = nn.Linear(config.hidden_size, 1)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], MultipleChoiceModelOutput]:
        r"""
input_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`):
    Indices of input sequence tokens in the vocabulary.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are input IDs?](../glossary#input-ids)
lang_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
    Indices of the language adapters that should be activated for each sample, respectively. Default: the index
    that corresponds to `self.config.default_language`.
token_type_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
    Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
    1]`:

    - 0 corresponds to a *sentence A* token,
    - 1 corresponds to a *sentence B* token.

    [What are token type IDs?](../glossary#token-type-ids)
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
    Labels for computing the multiple choice classification loss. Indices should be in `[0, ...,
    num_choices-1]` where `num_choices` is the size of the second dimension of the input tensors. (See
    `input_ids` above)
position_ids (`torch.LongTensor` of shape `(batch_size, num_choices, sequence_length)`, *optional*):
    Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
    config.max_position_embeddings - 1]`.

    [What are position IDs?](../glossary#position-ids)
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, num_choices, sequence_length, hidden_size)`, *optional*):
    Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
    is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
    model's internal embedding lookup matrix.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        num_choices = input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1]

        flat_input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
        flat_lang_ids = lang_ids.repeat(input_ids.size(0) * input_ids.size(1)) if lang_ids is not None else None
        flat_position_ids = position_ids.view(-1, position_ids.size(-1)) if position_ids is not None else None
        flat_token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1)) if token_type_ids is not None else None
        flat_attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
        flat_inputs_embeds = (
            inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
            if inputs_embeds is not None
            else None
        )

        outputs = self.roberta(
            flat_input_ids,
            lang_ids=flat_lang_ids,
            position_ids=flat_position_ids,
            token_type_ids=flat_token_type_ids,
            attention_mask=flat_attention_mask,
            head_mask=head_mask,
            inputs_embeds=flat_inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        pooled_output = outputs[1]

        pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)
        reshaped_logits = logits.view(-1, num_choices)

        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(reshaped_logits, labels)

        if not return_dict:
            output = (reshaped_logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return MultipleChoiceModelOutput(
            loss=loss,
            logits=reshaped_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


@auto_docstring
class XmodForTokenClassification(XmodPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels

        self.roberta = XmodModel(config, add_pooling_layer=False)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        labels: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], TokenClassifierOutput]:
        r"""
lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Indices of the language adapters that should be activated for each sample, respectively. Default: the index
    that corresponds to `self.config.default_language`.
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Labels for computing the token classification loss. Indices should be in `[0, ..., config.num_labels - 1]`.
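
Example (a minimal usage sketch, not part of this file; the checkpoint name, language code, and `num_labels`
value are illustrative assumptions):

```python
>>> import torch
>>> from transformers import AutoTokenizer, XmodForTokenClassification

>>> tokenizer = AutoTokenizer.from_pretrained("facebook/xmod-base")
>>> model = XmodForTokenClassification.from_pretrained("facebook/xmod-base", num_labels=2)
>>> model.set_default_language("en_XX")

>>> inputs = tokenizer("HuggingFace is based in New York City", return_tensors="pt")
>>> with torch.no_grad():
...     logits = model(**inputs).logits
>>> predicted_token_classes = logits.argmax(-1)  # shape: (1, sequence_length)
```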
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.roberta(
            input_ids,
            lang_ids=lang_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]

        sequence_output = self.dropout(sequence_output)
        logits = self.classifier(sequence_output)

        loss = None
        if labels is not None:
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))

        if not return_dict:
            output = (logits,) + outputs[2:]
            return ((loss,) + output) if loss is not None else output

        return TokenClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


class XmodClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        classifier_dropout = (
            config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
        )
        self.dropout = nn.Dropout(classifier_dropout)
        self.out_proj = nn.Linear(config.hidden_size, config.num_labels)

    def forward(self, features, **kwargs):
        x = features[:, 0, :]  # take <s> token (equiv. to [CLS])
        x = self.dropout(x)
        x = self.dense(x)
        x = torch.tanh(x)
        x = self.dropout(x)
        x = self.out_proj(x)
        return x


@auto_docstring
class XmodForQuestionAnswering(XmodPreTrainedModel):
    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels

        self.roberta = XmodModel(config, add_pooling_layer=False)
        self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels)

        # Initialize weights and apply final processing
        self.post_init()

    @auto_docstring
    def forward(
        self,
        input_ids: Optional[torch.LongTensor] = None,
        lang_ids: Optional[torch.LongTensor] = None,
        attention_mask: Optional[torch.FloatTensor] = None,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        start_positions: Optional[torch.LongTensor] = None,
        end_positions: Optional[torch.LongTensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple[torch.Tensor], QuestionAnsweringModelOutput]:
        r"""
        lang_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of the language adapters that should be activated for each sample, respectively. Default: the index
            that corresponds to `self.config.default_language`.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        outputs = self.roberta(
            input_ids,
            lang_ids=lang_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]

        logits = self.qa_outputs(sequence_output)
        start_logits, end_logits = logits.split(1, dim=-1)
        start_logits = start_logits.squeeze(-1).contiguous()
        end_logits = end_logits.squeeze(-1).contiguous()

        total_loss = None
        if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds a dimension
            if len(start_positions.size()) > 1:
                start_positions = start_positions.squeeze(-1)
            if len(end_positions.size()) > 1:
                end_positions = end_positions.squeeze(-1)
            # sometimes the start/end positions are outside our model inputs, we ignore these terms
            ignored_index = start_logits.size(1)
            start_positions = start_positions.clamp(0, ignored_index)
            end_positions = end_positions.clamp(0, ignored_index)

            loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
            start_loss = loss_fct(start_logits, start_positions)
            end_loss = loss_fct(end_logits, end_positions)
            total_loss = (start_loss + end_loss) / 2

        if not return_dict:
            output = (start_logits, end_logits) + outputs[2:]
            return ((total_loss,) + output) if total_loss is not None else output

        return QuestionAnsweringModelOutput(
            loss=total_loss,
            start_logits=start_logits,
            end_logits=end_logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
E,,-E
 5++,E
 !!2!23	E

 !!1!12E
 u//0E
 E--.E
   1 12E
 "%"2"23E
   0 01E
 $D>E
 'tnE
 d^E
 
uU\\"$@@	AE
 E
rM   r  c                     U R                  U5      R                  5       n[        R                  " USS9R	                  U5      U-   U-  nUR                  5       U-   $ )z
Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding symbols
are ignored. This is modified from fairseq's `utils.make_positions`.

Args:
    input_ids: torch.Tensor

Returns: torch.Tensor
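
Worked example (illustrative): with `padding_idx = 1` and `input_ids = [[7, 9, 1, 1]]`, the non-padding mask is
`[1, 1, 0, 0]`, its cumulative sum is `[1, 2, 2, 2]`, re-masking gives `[1, 2, 0, 0]`, and adding `padding_idx`
yields position ids `[2, 3, 1, 1]`: real tokens count up from `padding_idx + 1` while padding positions keep
`padding_idx` itself.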
    """
    # The series of casts and type-conversions here are carefully balanced to both work with ONNX export and XLA.
    mask = input_ids.ne(padding_idx).int()
    incremental_indices = (torch.cumsum(mask, dim=1).type_as(mask) + past_key_values_length) * mask
    return incremental_indices.long() + padding_idx


__all__ = [
    "XmodForCausalLM",
    "XmodForMaskedLM",
    "XmodForMultipleChoice",
    "XmodForQuestionAnswering",
    "XmodForSequenceClassification",
    "XmodForTokenClassification",
    "XmodModel",
    "XmodPreTrainedModel",
]