
    <hy                       S r SSKrSSKrSSKrSSKJr  SSKJrJr  SSK	r	SSK
r	SSK	JrJr  SSKJr  SSKJr  SS	KJrJr  SS
KJr  SSKJr  SSKJr  SSKJr  SSKJrJrJr  SSK J!r!  \RD                  " \#5      r$S@S jr%S r&S@S jr'S r(\\" SS9 " S S\5      5       5       r)\\" SS9 " S S\5      5       5       r*\\" SS9 " S S\5      5       5       r+\\" SS9 " S S \5      5       5       r,\ " S! S"\5      5       r- " S# S$\R\                  5      r/ " S% S&\R`                  5      r1 " S' S(\R`                  5      r2 " S) S*\R`                  5      r3 " S+ S,\5      r4 " S- S.\5      r5\" S/S9 " S0 S1\-5      5       r6\" S2S9 " S3 S4\-5      5       r7\ " S5 S6\-5      5       r8\" S7S9 " S8 S9\-\5      5       r9\" S:S9 " S; S<\-\5      5       r: " S= S>\-5      r;/ S?Qr<g)AzRPyTorch ProphetNet model, ported from ProphetNet repo(fairsequery_states version).    N)	dataclass)OptionalUnion)Tensornn)	LayerNorm   )ACT2FN)CacheEncoderDecoderCache)GenerationMixin)GradientCheckpointingLayer)BaseModelOutput)PreTrainedModel)ModelOutputauto_docstringlogging   )ProphetNetConfigc                     U(       a,  [         R                  R                  U R                  5       US9$ [         R                  R                  X[        R
                  S9$ )Ndimr   dtype)r   
functionalsoftmaxfloattorchfloat32)hidden_stater   
onnx_traces      j/var/www/html/shao/venv/lib/python3.13/site-packages/transformers/models/prophetnet/modeling_prophetnet.pyr   r   )   sF    }}$$\%7%7%9s$CC}}$$\%--$PP    c                 h   [         R                  " XU 4X#S9[         R                  " U5      R                  -  nUR	                  5       R                  5       n[        U5       H,  nXV   R                  SSS9  XF   R                  U* S-   5        M.     SUSS2SS2S4'   [         R                  " XE/SS9$ )	z8
This function computes the bias for the predict stream
)devicer   r   F)wrapr   N   r   )
r   onesfinfomindetachclonerangefill_diagonal_triu_cat)sequence_lengthngramr%   r   
left_blockright_block
stream_idxs          r"   ngram_attention_biasr6   0   s    
 	

EO<VY\a\g\ghm\n\r\rr  ##%++-KEl
..qu.=$$j[1_5 # Jq!Qw99j.A66r#   c                    U* nSnU(       a[  U S-  n U[         R                  " U[         R                  " U5      5      R                  5       U -  -   n[         R                  " U5      nO+[         R
                  " U[         R                  " U5      5      nU S-  n[         R                  " XF5      nU[         R                  " UR                  5       U-  5      [        R                  " X-  5      -  X-
  -  -   n[         R                  " U[         R                  " U5      U S-
  -  5      R                  5       nU[         R                  " XtR                  5       U5      -   nU$ )zg
This function computes individual parts of the relative position buckets. For more detail, see paper.
r   r'   r   )r   lt
zeros_likeintabsmaxlogr   mathr*   	ones_likewhere)	num_bucketsmax_distancerelative_positionsis_bidirectionalinv_relative_positionsrel_positions_bucket	max_exactis_smallval_if_larges	            r"   compute_relative_bucketsrJ   A   s>    10!Q& hh-u/?/?@V/WX\\^allm 	 "'+A!B!&+A5CSCSTjCk!lq Ixx.:Huyy)?)E)E)G))STW[W_W_ X  		  " "L 99\5??<+HKZ[O+\]aacL/%++hHbHbHdfr2ssr#   c                 x   UR                  S5      R                  SUR                  S5      S5      nX2R                  S5      -
  n[        R                  " US-
  U4SS9R                  S5      nUR                  SUR                  S5      S5      nXBR                  S5      -
  n[        XUSS9n[        XUSS9nXV4$ )ze
This function computes both main and predict relative position buckets. For more detail, see paper.
r   r   F)rD   )	unsqueezerepeatsizer   r0   rJ   )rA   rB   position_idsmain_stream_relative_positions$predicting_stream_relative_positionsmain_relative_position_buckets!predict_relative_position_bucketss          r"   #compute_all_stream_relative_bucketsrU   \   s    
 &2%;%;A%>%E%EaIZIZ[]I^`a%b"%CF\F\]_F`%`" ,199lQ6F5U[]+^+h+hij+k(+O+V+VWXZfZkZklnZoqr+s(+ORhRhikRl+l( &>#ATY&" )A#GZ_)% *LLr#   zF
    Base class for sequence-to-sequence language models outputs.
    )custom_introc                   :   \ rS rSr% SrSr\\R                     \	S'   Sr
\\R                     \	S'   Sr\\R                     \	S'   Sr\\\R                        \	S'   Sr\\\R                        \	S'   Sr\\\R                        \	S	'   Sr\\\R                        \	S
'   Sr\\\R                        \	S'   Sr\\\R                        \	S'   Sr\\R                     \	S'   Sr\\\R                        \	S'   Sr\\\R                        \	S'   \S 5       rSrg)ProphetNetSeq2SeqLMOutputs   a  
loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided):
    Language modeling loss.
logits (`torch.FloatTensor` of shape `(batch_size, decoder_sequence_length, config.vocab_size)`):
    Prediction scores of the main stream language modeling head (scores for each vocabulary token before
    SoftMax).
logits_ngram (`torch.FloatTensor` of shape `(batch_size, ngram * decoder_sequence_length, config.vocab_size)`):
    Prediction scores of the predict stream language modeling head (scores for each vocabulary token before
    SoftMax).
past_key_values (`list[torch.FloatTensor]`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
    List of `torch.FloatTensor` of length `config.n_layers`, with each tensor of shape `(2, batch_size,
    num_attn_heads, decoder_sequence_length, embed_size_per_head)`).

    Contains pre-computed hidden-states (key and values in the attention blocks) of the decoder that can be
    used (see `past_key_values` input) to speed up sequential decoding.
decoder_ngram_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
    Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
    shape `(batch_size, ngram * decoder_sequence_length, hidden_size)`.

    Hidden-states of the predict stream of the decoder at the output of each layer plus the initial embedding
    outputs.
decoder_ngram_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_attn_heads,
    decoder_sequence_length, decoder_sequence_length)`.

    Attentions weights of the predict stream of the decoder, after the attention softmax, used to compute the
    weighted average in the self-attention heads.
encoder_last_hidden_state (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*):
    Sequence of hidden-states at the output of the last layer of the encoder of the model.
Nlosslogitslogits_ngrampast_key_valuesdecoder_hidden_statesdecoder_ngram_hidden_statesdecoder_attentionsdecoder_ngram_attentionscross_attentionsencoder_last_hidden_stateencoder_hidden_statesencoder_attentionsc                 P    [         R                  " S[        5        U R                  $ Nzi`decoder_cross_attentions` is deprecated and will be removed soon. Please use `cross_attentions` instead.warningswarnFutureWarningrb   selfs    r"   decoder_cross_attentions2ProphetNetSeq2SeqLMOutput.decoder_cross_attentions   $    	

 $$$r#    )__name__
__module____qualname____firstlineno____doc__rZ   r   r   FloatTensor__annotations__r[   r\   r]   tupler^   r_   r`   ra   rb   rc   rd   re   propertyrn   __static_attributes__rq   r#   r"   rX   rX   s   sG   > )-D(5$$
%,*.FHU&&'.04L(5,,-4:>OXeE$5$567>@D8E%*;*;$<=DFJ%0A0A*B!CJ=Au'8'8!9:ACGhuU->->'?@G;?huU%6%678?=Ax(9(9:A@D8E%*;*;$<=D=Au'8'8!9:A% %r#   rX   z
    Base class for model encoder's outputs that also contains : pre-computed hidden states that can speed up sequential
    decoding.
    c                      \ rS rSr% Sr\R                  \S'   Sr\	\R                     \S'   Sr
\	\\R                        \S'   Sr\	\\R                        \S'   Sr\	\\R                        \S'   Sr\	\\R                        \S	'   Sr\	\\R                        \S
'   Sr\	\\R                        \S'   Sr\	\R                     \S'   Sr\	\\R                        \S'   Sr\	\\R                        \S'   \S 5       rSrg)ProphetNetSeq2SeqModelOutput   a  
last_hidden_state (`torch.FloatTensor` of shape `(batch_size, decoder_sequence_length, hidden_size)`):
    Sequence of main stream hidden-states at the output of the last layer of the decoder of the model.

    If `past_key_values` is used only the last hidden-state of the sequences of shape `(batch_size, 1,
    hidden_size)` is output.
last_hidden_state_ngram (`torch.FloatTensor` of shape `(batch_size,ngram * decoder_sequence_length, config.vocab_size)`, *optional*):
    Sequence of predict stream hidden-states at the output of the last layer of the decoder of the model.
past_key_values (`list[torch.FloatTensor]`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
    List of `torch.FloatTensor` of length `config.n_layers`, with each tensor of shape `(2, batch_size,
    num_attn_heads, decoder_sequence_length, embed_size_per_head)`).

    Contains pre-computed hidden-states (key and values in the attention blocks) of the decoder that can be
    used (see `past_key_values` input) to speed up sequential decoding.
decoder_ngram_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
    Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
    shape `(batch_size, ngram * decoder_sequence_length, hidden_size)`.

    Hidden-states of the predict stream of the decoder at the output of each layer plus the initial embedding
    outputs.
decoder_ngram_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_attn_heads,
    decoder_sequence_length, decoder_sequence_length)`.

    Attentions weights of the predict stream of the decoder, after the attention softmax, used to compute the
    weighted average in the
encoder_last_hidden_state (`torch.FloatTensor` of shape `(batch_size, encoder_sequence_length, hidden_size)`, *optional*):
    Sequence of hidden-states at the output of the last layer of the encoder of the model.
last_hidden_stateNlast_hidden_state_ngramr]   r^   r_   r`   ra   rb   rc   rd   re   c                 P    [         R                  " S[        5        U R                  $ rg   rh   rl   s    r"   rn   5ProphetNetSeq2SeqModelOutput.decoder_cross_attentions   rp   r#   rq   )rr   rs   rt   ru   rv   r   rw   rx   r   r   r]   ry   r^   r_   r`   ra   rb   rc   rd   re   rz   rn   r{   rq   r#   r"   r}   r}      s*   < (((;?Xe&7&78?:>OXeE$5$567>@D8E%*;*;$<=DFJ%0A0A*B!CJ=Au'8'8!9:ACGhuU->->'?@G;?huU%6%678?=Ax(9(9:A@D8E%*;*;$<=D=Au'8'8!9:A% %r#   r}   zs
    Base class for model's outputs that may also contain a past key/values (to speed up sequential decoding).
    c                   t   \ rS rSr% Sr\R                  \S'   Sr\	\R                     \S'   Sr
\	\\R                        \S'   Sr\	\\R                        \S'   Sr\	\\R                        \S'   Sr\	\\R                        \S	'   Sr\	\\R                        \S
'   Sr\	\\R                        \S'   Srg)ProphetNetDecoderModelOutput   a  
last_hidden_state (`torch.FloatTensor` of shape `(batch_size, decoder_sequence_length, hidden_size)`):
    Sequence of main stream hidden-states at the output of the last layer of the decoder of the model.

    If `past_key_values` is used only the last hidden-state of the sequences of shape `(batch_size, 1,
    hidden_size)` is output.
last_hidden_state_ngram (`torch.FloatTensor` of shape `(batch_size, ngram * decoder_sequence_length, config.vocab_size)`):
    Sequence of predict stream hidden-states at the output of the last layer of the decoder of the model.
past_key_values (`list[torch.FloatTensor]`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
    List of `torch.FloatTensor` of length `config.n_layers`, with each tensor of shape `(2, batch_size,
    num_attn_heads, decoder_sequence_length, embed_size_per_head)`).

    Contains pre-computed hidden-states (key and values in the attention blocks) of the decoder that can be
    used (see `past_key_values` input) to speed up sequential decoding.
hidden_states_ngram (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
    Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
    shape `(batch_size, ngram * decoder_sequence_length, hidden_size)`.

    Hidden-states of the predict stream of the decoder at the output of each layer plus the initial embedding
    outputs.
ngram_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_attn_heads,
    decoder_sequence_length, decoder_sequence_length)`.

    Attentions weights of the predict stream of the decoder, after the attention softmax, used to compute the
    weighted average in the
r   Nr   r]   hidden_stateshidden_states_ngram
attentionsngram_attentionsrb   rq   )rr   rs   rt   ru   rv   r   rw   rx   r   r   r]   ry   r   r   r   r   rb   r{   rq   r#   r"   r   r      s    8 (((;?Xe&7&78?:>OXeE$5$567>8<M8E%"3"345<>B%(9(9":;B59Ju00129;?huU%6%678?;?huU%6%678?r#   r   c                      \ rS rSr% SrSr\\R                     \	S'   Sr
\\R                     \	S'   Sr\\R                     \	S'   Sr\\\R                        \	S'   Sr\\\R                        \	S'   Sr\\\R                        \	S	'   Sr\\\R                        \	S
'   Sr\\\R                        \	S'   Sr\\\R                        \	S'   Srg)ProphetNetDecoderLMOutputi  av	  
ngram_hidden_states (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
    Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
    shape `(batch_size, ngram * decoder_sequence_length, hidden_size)`.

    Hidden-states of the predict stream of the decoder at the output of each layer plus the initial embedding
    outputs.
loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` is provided):
    Language modeling loss.
logits (`torch.FloatTensor` of shape `(batch_size, decoder_sequence_length, config.vocab_size)`):
    Prediction scores of the main stream language modeling head (scores for each vocabulary token before
    SoftMax).
logits_ngram (`torch.FloatTensor` of shape `(batch_size, ngram * decoder_sequence_length, config.vocab_size)`):
    Prediction scores of the predict stream language modeling head (scores for each vocabulary token before
    SoftMax).
past_key_values (`list[torch.FloatTensor]`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
    List of `torch.FloatTensor` of length `config.n_layers`, with each tensor of shape `(2, batch_size,
    num_attn_heads, decoder_sequence_length, embed_size_per_head)`).

    Contains pre-computed hidden-states (key and values in the attention blocks) of the decoder that can be
    used (see `past_key_values` input) to speed up sequential decoding.
hidden_states_ngram (`tuple(torch.FloatTensor)`, *optional*, returned when `output_hidden_states=True` is passed or when `config.output_hidden_states=True`):
    Tuple of `torch.FloatTensor` (one for the output of the embeddings + one for the output of each layer) of
    shape `(batch_size, ngram * decoder_sequence_length, hidden_size)`.

    Hidden-states of the predict stream of the decoder at the output of each layer plus the initial embedding
    outputs.
ngram_attentions (`tuple(torch.FloatTensor)`, *optional*, returned when `output_attentions=True` is passed or when `config.output_attentions=True`):
    Tuple of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_attn_heads,
    decoder_sequence_length, decoder_sequence_length)`.

    Attentions weights of the predict stream of the decoder, after the attention softmax, used to compute the
    weighted average in the
NrZ   r[   r\   r]   r   r   r   r   rb   rq   )rr   rs   rt   ru   rv   rZ   r   r   rw   rx   r[   r\   r]   ry   r   r   r   r   rb   r{   rq   r#   r"   r   r     s    !F )-D(5$$
%,*.FHU&&'.04L(5,,-4:>OXeE$5$567>8<M8E%"3"345<>B%(9(9":;B59Ju00129;?huU%6%678?;?huU%6%678?r#   r   c                   4    \ rS rSr% \\S'   SrSrS rS r	Sr
g)	ProphetNetPreTrainedModeliN  config
prophetnetTc                 F   [        U[        R                  5      (       ak  UR                  R                  R                  SU R                  R                  S9  UR                  b%  UR                  R                  R                  5         g g [        U[        R                  5      (       ax  UR                  R                  R                  SU R                  R                  S9  UR                  b2  UR                  R                  UR                     R                  5         g g g )N        )meanstd)
isinstancer   Linearweightdatanormal_r   init_stdbiaszero_	Embeddingpadding_idx)rm   modules     r"   _init_weights'ProphetNetPreTrainedModel._init_weightsT  s    fbii((MM&&CT[[5I5I&J{{&  &&( '--MM&&CT[[5I5I&J!!-""6#5#56<<> . .r#   c                    U R                   R                  nU R                   R                  nUc   S5       eUR                  UR                  5      nUSS S24   R                  5       USSS 24'   X$S'   Uc   S5       eUR                  US:H  U5        [        R                  " US:  5      R                  5       (       d   S	5       eU$ )
Nzself.model.config.decoder_start_token_id has to be defined. In ProphetNet it is usually set to the pad_token_id. See ProphetNet docs for more information.rL   r   ).r   z1self.model.config.pad_token_id has to be defined.r   z8Verify that `shifted_input_ids` has only positive values)
r   decoder_start_token_idpad_token_id	new_zerosshaper,   masked_fill_r   allitem)rm   	input_idsr   r   shifted_input_idss        r"   _shift_right&ProphetNetPreTrainedModel._shift_right^  s    !%!C!C{{//%1 	
F	
1 &//	@%.sCRCx%8%>%>%@#qr'"$:&!'\)\\'&&'8D'@,Oyy*a/05577s9ss7  r#   rq   N)rr   rs   rt   ru   r   rx   base_model_prefixsupports_gradient_checkpointingr   r   r{   rq   r#   r"   r   r   N  s    $&*#?!r#   r   c                   T   ^  \ rS rSrSrS\SS4U 4S jjrS
U 4S jjrU 4S jrS	r	U =r
$ )ProphetNetPositionalEmbeddingsiu  z
This module learns positional embeddings up to a fixed maximum size. Padding ids are ignored by either offsetting
based on padding_idx or by setting padding_idx to None and ensuring that the appropriate position ids are passed to
the forward function.
r   returnNc                    > UR                   U l        [        TU ]  UR                   UR                  UR
                  5        g N)max_position_embeddings
max_lengthsuper__init__hidden_sizer   rm   r   	__class__s     r"   r   'ProphetNetPositionalEmbeddings.__init__|  s3     88779K9KVM`M`ar#   c                 D  > Ub  U R                   b   S5       eUc  Ubj  UR                  5       S:w  aV  UR                  5       nUS   U-   n[        R                  " S[        R                  US9[        U R                   U-   5      -  nOUc$  [        R                  " U[        R                  US9n[        R                  " USS9R                  U5      U-  R	                  5       U R                   -   nUR                  SU R                  S-
  5      n[        TU ]-  U5      U4$ )NzCIf position_ids is pre-computed then padding_idx should not be set.r   r   )r   r   r   r%   r   )r   get_seq_lengthr   r(   longr:   cumsumtype_asclampr   r   forward)	rm   inputs_shaper%   attention_maskr]   rP   prev_num_input_idsnum_input_idsr   s	           r"   r   &ProphetNetPositionalEmbeddings.forward  s   $$*:*:*B 	
Q	
C */M/M/OST/T &5%C%C%E" ,Q2D D$zz&

6R((=89  ")%*ZZEJJW]%^N LLQ7??OR``$&4++ ,
  ,11!T__q5HIw|,l::r#   c                 "   > [         TU ]  U5      $ r   )r   r   )rm   rP   r   s     r"   _forward'ProphetNetPositionalEmbeddings._forward  s    w|,,r#   )r   )NNN)rr   rs   rt   ru   rv   r   r   r   r   r{   __classcell__r   s   @r"   r   r   u  s.    b/ bD b;8- -r#   r   c                      ^  \ rS rSrSrSS\S\S\\   4U 4S jjjr      SS\\	   S\\	   S	\\	   S
\\
   S\\   S\\R                     S\\	\\	   4   4S jjrSrU =r$ )ProphetNetAttentioni  z=Multi-headed attention from 'Attention Is All You Need' paperr   num_attn_heads	layer_idxc                   > [         TU ]  5         UR                  nUR                  U l        UR                  U l        X l        XB-  U l        X0l        U R                  U-  U:X  d   S5       e[        R                  " XD5      U l
        [        R                  " XD5      U l        [        R                  " XD5      U l        [        R                  " XD5      U l        g )Nzw`config.hidden_size` must be divisible by `config.num_encoder_attention_heads` and `config.num_decoder_attention_heads`)r   r   r   attention_dropoutdropoutr   head_dimr   r   r   key_proj
value_proj
query_projout_proj)rm   r   r   r   r   r   s        r"   r   ProphetNetAttention.__init__  s    ((!'!9!9~~,#5"}}~-< 	
4	
<
 		+;))K=))K=		+;r#   key_value_statesr   layer_head_maskpast_key_valueoutput_attentionscache_positionr   c                 	   UR                  5       u  pn
US Ln[        UR                  5       5      UU	U
/:X  d   SXU
4 SUR                  5        35       eU R                  U5      U R                  S-  -  nUb]  [	        U[
        5      (       aF  UR                  R                  U R                  5      nU(       a  UR                  nOUR                  nOUnU(       a  UOUnU(       aQ  UbN  W(       aG  WR                  U R                     R                  nUR                  U R                     R                  nOU R                  U5      nU R                  U5      nUR!                  USU R"                  U R                  5      R%                  SS5      nUR!                  USU R"                  U R                  5      R%                  SS5      nUbN  U(       d  UOS nWR'                  UUU R                  SU05      u  nnU(       a  SUR                  U R                  '   UR!                  XU R"                  U R                  5      R%                  SS5      nUR                  S5      n[(        R*                  " S	UUR%                  SS
5      5      nXR"                  U	U4nUR                  5       U:w  a  [-        SU SUR                  5        35      eUb  UR/                  5       S:X  a  S nXR"                  SU4nUb3  UR                  5       U:w  a  [-        SU SUR                  5        35      eUb  UU-   nU(       a  UnOS n[0        R2                  R5                  USS9nUb  UR                  5       U R"                  4:X  d&   SU R"                  4 SUR                  5        35       eUR!                  SSSS5      UR!                  XR"                  U	U5      -  nUR!                  SSSS5      U-  n[0        R2                  R7                  UU R8                  U R:                  S9n[(        R*                  " S	UU5      nXR"                  XR                  4nUR                  5       U:w  a  [-        SU SUR                  5        35      eUR%                  SS5      R=                  XU
5      nU R?                  U5      n[0        R2                  R7                  UU R6                  U R:                  S9nUU4$ )Nz Size of hidden states should be 	, but is       ?rL   r   r'   r   Tzbsij,bsjk->bsikr	   z#Attention weights should have size r   z Attention mask should have size r   /Head mask for a single layer should be of size ptrainingz `attn_output` should have shape , but is of shape ) rO   listr   r   r   r   
is_updatedgetr   cross_attention_cacheself_attention_cachelayerskeysvaluesr   r   viewr   	transposeupdater   einsum
ValueErrorr   r   r   r   r   r   r   reshaper   )rm   r   r   r   r   r   r   r   
batch_sizetgt_lenr   is_cross_attentionquery_statesr   curr_past_key_valuecurrent_states
key_statesvalue_statessrc_lenattn_weightsexpected_shapeattn_weights_reshaped
attn_probsattn_outputs                           r"   r   ProphetNetAttention.forward  s    ,9+=+=+?(
[ .T9M&&().
 
 	p .j;.N-OyYfYkYkYmXno		p 
 }59KL%.*=>>+66::4>>J
%*8*N*N'*8*M*M'&4#-?)]."<,33DNNCHHJ.55dnnELLL~6J??>:L#R9L9Ldmm\ffghjklJ',,ZT=P=PRVR_R_`jjklnopL)7It+>+E+Ednn?OQ_>`,(
L &@DN--dnn=#((d>Q>QSWS`S`akklmopq//!$||$5|ZEYEYZ[]^E_`$&9&97GL.0B>BRR[\h\m\m\o[pqrr %.*<*<*>!*C!N$&9&91gF%.*=*=*?>*Q??OyYgYlYlYnXopqq%'.8L$0!$(!}},,\r,B&"'')d.A.A-CC A4CVCVBXAY Z#((*+-C +//2q!<|?P?P//'@ L
 %4$8$8B1$EH]$]!]]**$$]] + 


 ll#4j,O$&9&97MMR/??OOabmbrbrbtauvww!++Aq199*{[mmK0mm++K4<<RVR_R_+`111r#   )	r   r   r   r   r   r   r   r   r   r   )NNNNFN)rr   rs   rt   ru   rv   r   r:   r   r   r   r   boolr   ry   r   r{   r   r   s   @r"   r   r     s    G</ < <QYZ]Q^ < <0 .2+/,0*.,115j2 #6*j2 !(	j2
 "&)j2 !j2 $D>j2 !.j2 
vx''	(j2 j2r#   r   c                   >   ^  \ rS rSrSrS\S\4U 4S jjrS rSr	U =r
$ )ProphetNetFeedForwardi%  ze
This is the residual two feed-forward layer block based on the original Transformer implementation.
r   ffn_dimc                 ,  > [         TU ]  5         [        UR                     U l        [
        R                  " UR                  U5      U l        [
        R                  " X!R                  5      U l	        UR                  U l
        UR                  U l        g r   )r   r   r
   activation_functionactivation_fnr   r   r   intermediateoutputactivation_dropoutr   )rm   r   r	  r   s      r"   r   ProphetNetFeedForward.__init__*  si    #F$>$>?IIf&8&8'Bii););<"(";";~~r#   c                 4   U R                  U5      nU R                  U5      n[        R                  R	                  XR
                  U R                  S9nU R                  U5      n[        R                  R	                  XR                  U R                  S9nU$ )Nr   )r  r  r   r   r   r  r   r  )rm   r   s     r"   r   ProphetNetFeedForward.forward2  s    ))-8**=9--m?V?Vaeanan-oM2--m||VZVcVc-dr#   )r  r  r   r  r  )rr   rs   rt   ru   rv   r   r:   r   r   r{   r   r   s   @r"   r  r  %  s&    &/ &# & r#   r  c                   z   ^  \ rS rSrSS\4U 4S jjjrS rS r        SS\\	\
      4S jjrS rS	 rS
rU =r$ )ProphetNetNgramSelfAttentioni<  r   c                   > [         TU ]  5         UR                  U l        UR                  U l        UR                  U l        UR
                  U l        UR                  U l        UR                  U l        UR                  U R                  -  U l	        UR                  U l
        X l        U R                  U R                  -  UR                  :X  d   S5       e[        R                  " UR                  UR                  5      U l        [        R                  " UR                  UR                  5      U l        [        R                  " UR                  UR                  5      U l        [        R                  " UR                  UR                  5      U l        [        R                  " UR                  U R                  U R                  -  5      U l        SU l        g )Nz6config.hidden_size must be divisible by num_attn_headsF)r   r   r   rA   relative_max_distancenum_decoder_attention_headsr   r   r   r   r2   r   r   r   r   r   r   r   relative_pos_embeddingsr!   rm   r   r   r   s      r"   r   %ProphetNetNgramSelfAttention.__init__=  sf   !--!--%+%A%A"$@@~~!'!9!9**d.A.AA\\
"}}t222f6H6HH 	
D	
H 		&"4"4f6H6HI))F$6$68J8JK))F$6$68J8JK 		&"4"4f6H6HI (*yy1C1CTEUEUX\XkXkEk'l$  r#   c                     UR                  X2U R                  U R                  5      R                  SS5      R	                  5       $ Nr   r'   )r   r   r   r   
contiguous)rm   tensorseq_lenr   s       r"   _shape#ProphetNetNgramSelfAttention._shape[  s7    {{:0C0CT]]S]]^_abcnnppr#   c                     SU l         g )NT)r!   rl   s    r"   prepare_for_onnx_export_5ProphetNetNgramSelfAttention.prepare_for_onnx_export_^  s	    r#   r   c
           	         UR                  5       u  pn[        UR                  5       5      XU/:X  d   SXU4 SUR                   35       eU R                  U5      nU R	                  U5      nU R                  U5      nXR                  S-  -  nU R                  XU
5      nU R                  USU
5      nU R                  USU
5      nXR                  SU R                  4nUR                  " U6 nUR                  " U6 nUR                  " U6 nUR                  SU R                  -   SS9nUR                  SU R                  -   SS9nUR                  SU R                  -   SS9nUR                  SU R                  -   SS9nUS   USS  nnUS   USS  nnUS   USS  nnUS   USS  nnUbG  [        U[        5      (       a  UR                  nOUnUR                  UUU R                   S	U	05      u  nnUSU R                  -   -  n["        R$                  " S
UUR'                  SS5      5      nU R)                  UUX5      n UU -   nUb  UU-   n[+        USU R,                  S9R/                  U5      n!Ubw  UR                  5       U R                  4:X  d&   SU R                  4 SUR                  5        35       eUR1                  SSSS5      U!R1                  XR                  SU5      -  n![2        R4                  R7                  U!U R8                  U R:                  S9n!["        R$                  " S
U!U5      n"U"R'                  SS5      R                  U
SUU5      n"U R=                  U"5      n"["        R>                  " US5      R1                  XR                  U R                  UU R                  5      n#["        R>                  " U V$s/ sH  n$["        R@                  " UU$/S5      PM     sn$S5      n%["        R>                  " USS9n&["        R@                  " U V's/ sH+  n'["        R@                  " UU'/S5      RC                  S5      PM-     sn'S5      n(["        R$                  " SU#U%45      n)U RE                  U&U)X5      n*U)U*-   n)Ub5  URG                  SSSSS5      nURI                  U)RJ                  5      nU)U-   n)[+        U)SU R,                  S9R/                  U)5      n+Ub]  UR                  5       U R                  4:X  d&   SU R                  4 SUR                  5        35       eUR1                  SSSSS5      U+-  n+[2        R4                  R7                  U+U R8                  U R:                  S9n+["        R$                  " SU+U(R'                  SS5      45      n,U,R'                  SS5      n,U,R                  XR                  UU5      n,U R=                  U,5      n,["        R@                  " U"U,/S5      R1                  U
SU5      n-U!R1                  XR                  US5      n![2        R4                  R7                  U-U R6                  U R:                  S9n-U-U!U+4$ s  sn$f s  sn'f )Nz#`hidden_states` should be of shape r   r   rL   r   r   r'   r   r   zbntc,bncs->bntsr	   )r   r!   r   r   r   zbnhtc,bnhsc->bnhts   zbnhts,bnhsc->bnhtc)&rO   r   r   r   r   r   r   r   r   r   chunkr2   r   r   r   r   r   r   r   r    get_main_relative_pos_embeddingsr   r!   r   r   r   r   r   r   r   r   stackr0   rM   #get_predict_relative_pos_embeddingspermutetor   ).rm   r   r   r   r   extended_predict_attention_maskrS   rT   rP   r   r   ngram_sequence_lengthr   r   r   r   
proj_shapehidden_states_listquery_states_listkey_states_listvalue_states_listmain_hidden_stateshidden_states_predict_listmain_query_statespredict_query_states_listmain_key_statespredict_key_states_listmain_value_statespredict_value_states_listr   r1   main_attn_weightsmain_relative_pos_embeddingsmain_attn_probsmain_attn_outputpredict_query_stateskeypredict_key_statespredict_hidden_statesv_ppredict_value_statespredict_attn_weightspredict_relative_pos_embeddingspredict_attn_probspredict_attn_outputr  s.                                                 r"   r   $ProphetNetNgramSelfAttention.forwarda  s
    :G9K9K9M6
;M&&()jQ\-]] 	
1*U`2`1a b##$&	
] }5]]=1
}5 $}}c'9: {{<
S[[R<
{{<Z@ "5"5r4==I
#++Z8''4
#++Z8 +00TZZQ0G(..q4::~1.E$**1tzz>q*A(..q4::~1.E9KA9NPbcdcePf67H7KM^_`_aMb43B13EWXWYGZ07H7KM^_`_aMb4 %.*=>>&4&I&I#&4#1D1K1K!2DNNEUWeDf2.O.
 0A

NC "LL):<MOhOhijlmOno (,'L'L 1<(
$ .0LL% 1N B!
 '#
$	 	 &"'')d.A.A-CC A4CVCVBXAY Z#((*+-C .221b!Q?/BVBV//_C O --//4CYCYdhdqdq/r
 !<<(9?L]^+55a;CCJPQSbdop==)9:  %{{+DaHMM

D$7$7$-- 

 #[[Zq)rZqSV%))_c4JA*NZq)rtuv !&,FA N  %yyLefLeSUYY)3/3==a@Lefhi 
  %||,@CWYkBlm +/*R*R!#7+
'
  46UU*6.M.U.UVWYZ\]_`bc.d+.M.P.PQeQkQk.l+#7:Y#Y $ 
 '&
'	 	 &"'')d.A.A-CC A4CVCVBXAY Z#((*+-C "1!5!5aB1!EHZ!Z]]22$"8"84== 3 
 $ll #57K7U7UVWYZ7["\
 2;;AqA199*jjRacno"mm,?@ ii!13F GKPPQ[]_alm)..z;N;NP_acdmm++K4<<RVR_R_+`O-???I *s gs   "Y1Y c                    UR                   u  pVpxUR                  XVXx5      nUc  UR                   S S u  pY[        R                  " SUR                   S   S-   5      R	                  S5      R	                  S5      R                  XYS5      R                  UR                  5      n
XR	                  S5      R                  XYS5      -
  n
[        U R                  U R                  U
S5      nU R                  U5      nUR                  UR                   S S U R                  U R                  4-   5      nUR                  SSSS5      nUR                  UR                   S S S-   5      nUR                  SU R                  S5      nUR                  SUR                   S   5      nUR                  5       nUR                  SUR!                  S5      5      n[        R"                  " USUS9nUR                  XVUS5      nU$ )	Nr'   r   rL   r   Fr	   )rL   r   index)r   r   r   arangerM   rN   r,  r%   rJ   rA   r  r  r   r+  r   r   rO   gather)rm   r   r   rP   rS   r   r   r   r   r1   rC   rel_pos_embeddingsr=  s                r"   r(  =ProphetNetNgramSelfAttention.get_main_relative_pos_embeddings  s    8D7I7I4
G#((WV)1*7*=*=bq*A'JQ 2 22 6 :;11
Q7L''(  "46L6LQ6O6V6VWatu6v!v-E  $"<"<>PRW.*
 "99-H/44$$Ra(D,<,<d>Q>Q+RR
 0771aC/778J8J2A8NQV8VW)G)N)NqRVReRegh)i&)G)L)L.44R8*
& *H)L)L)N&/77<N<S<STV<WX',||4FAUs't$'C'H'Helnp'q$++r#   c                     UR                   SS u  pVUc  UR                   S   nUS   S   US-
  :X  d   S5       e[        R                  " SU5      R                  S5      R                  S5      R	                  XVS5      R                  UR                  5      nXR                  S5      R	                  XVS5      -
  n[        U R                  U R                  US5      nUR                  SS5      nU R                  U5      n	U	R                  UR                   S S U R                  U R                  4-   5      n	U	R                  SSSSS5      n	U	R                  SU R                  5      n	UR                  S5      nUR	                  U R                   SU R                  S5      nUR                  SUR#                  S5      5      R%                  5       n[        R&                  " U	SUS	9n
U
R                  XPR                   U R                  US5      n
U
$ )
Nr   r'   rL   r   zb`position_ids` are incorrect. They should be of the format 1 2 3 4 5 ... (key_sequence_length - 1)Fr&  r	   rL  )r   r   rN  rM   rN   r,  r%   rJ   rA   r  r   r  r   r   r+  r   r2   rO   r   rO  )rm   r   r   rP   rT   r   r1   key_sequence_lengthrC   rP  rG  s              r"   r*  @ProphetNetNgramSelfAttention.get_predict_relative_pos_embeddingsB  s     '4&9&9!A&>#
,4"."4"4R"8?1%)<q)@@ t@ Q 3411
Q7L''(  "46L6LQ6O6V6VWatu6v!v0H  $"<"<>PRW1-
 &//15!99-H 044$(8(8$:M:M'NN
 0771aAF/77D<L<LM,M,W,WXY,Z),M,T,TJJ4..-
) -N,R,R166r:-

$& 	* +0,,A-N+
'
 +J*N*N

D$7$7"+
' /.r#   )r   r   r   r   r   r   r2   r   rA   r!   r   r   r  r  r   r   )NNNNNNNN)rr   rs   rt   ru   r   r   r   r#  r   ry   r   r   r(  r*  r{   r   r   s   @r"   r  r  <  sg     /    <q 37(,'+*.r@ !v/r@h+,Z9/ 9/r#   r  c                   H   ^  \ rS rSrSrS\4U 4S jjr SS\4S jjrSr	U =r
$ )	ProphetNetEncoderLayeri~  z
Encoder block for Prophetnet
r   c                    > [         TU ]  5         [        XR                  5      U l        [        UR                  5      U l        [        XR                  5      U l
        [        UR                  5      U l        g r   )r   r   r   num_encoder_attention_heads	self_attnr   r   self_attn_layer_normr  encoder_ffn_dimfeed_forwardfeed_forward_layer_normr   s     r"   r   ProphetNetEncoderLayer.__init__  s[    ,V5W5WX$-f.@.@$A! 2&:P:PQ'01C1C'D$r#   r   c                     U R                  UUUUS9u  pVU R                  XQ-   5      nU R                  U5      nU R                  Xq-   5      nU4nU(       a  X4-  nU$ )N)r   r   r   r   )rY  rZ  r\  r]  )	rm   r   r   r   r   attention_outputr   feed_forward_outputoutputss	            r"   r   ProphetNetEncoderLayer.forward  s~     *.')+/	 *8 *
& 112B2RS #//>445H5XY "&Gr#   )r\  r]  rY  rZ  F)rr   rs   rt   ru   rv   r   r   r  r   r{   r   r   s   @r"   rV  rV  ~  s0    E/ E #(
   r#   rV  c                      ^  \ rS rSrSrS
S\4U 4S jjjr             SS\\   S\\   S\\	R                     4S jjrS	rU =r$ )ProphetNetDecoderLayeri  z
Decoder block for Prophetnet
r   c                 b  > [         TU ]  5         [        XS9U l        [	        UR
                  5      U l        UR                  (       a3  [        XR                  US9U l
        [	        UR
                  5      U l        [        XR                  5      U l        [	        UR
                  5      U l        g )Nr   )r   r   r  rY  r   r   rZ  add_cross_attentionr   r  
cross_attncross_attn_layer_normr  decoder_ffn_dimr\  r]  r  s      r"   r   ProphetNetDecoderLayer.__init__  s    5fR$-f.@.@$A! %%1&:\:\hqrDO)263E3E)FD& 2&:P:PQ'01C1C'D$r#   	use_cacher   r   c                 .   U R                  UUUUUUU	U
S9u  nnnU R                  X-   5      nS nUb+  U R                  UUUUUUS9u  nnU R                  UU-   5      nU R	                  U5      nU R                  UU-   5      nU4nU(       a  UUUU4-  nU$ )N)r   r   r   r   r-  rS   rT   rP   )r   r   r   r   r   r   )rY  rZ  rj  rk  r\  r]  )rm   r   r   rd   encoder_attn_maskr   cross_attn_layer_head_maskr-  rS   rT   rP   r   rn  r   r   ngram_attention_outputself_attn_weightsself_attn_weights_ngramcross_attn_weightsr`  ra  rb  s                         r"   r   ProphetNetDecoderLayer.forward  s    $ NR^^'))+,K+I.O% N\ 	N
J 13J 11-2XY! ,37??+!60 :-"3 4C 400 !667G-7WXM #//>445H=5XY ")+BDVWWGr#   )rj  rk  r\  r]  rY  rZ  r   )NNNNNNNNNNTFN)rr   rs   rt   ru   rv   r   r   r   r  r   r   r   r{   r   r   s   @r"   rf  rf    s    E/ E E$ "#'(,'+*.$(,1154 D>4 $D>4 !.4 4r#   rf  z=
    The standalone encoder part of the ProphetNetModel.
    c                   "  ^  \ rS rSrSS\S\R                  4U 4S jjjrS rS r	\
       SS\\R                     S\\R                     S	\\R                     S
\\R                     S\\   S\\   S\\   S\\\4   4S jj5       rSrU =r$ )ProphetNetEncoderi  r   word_embeddingsc                   > [         TU ]  U5        Ub  UO3[        R                  " UR                  UR
                  UR                  S9U l        [        U5      U l	        [        UR
                  5      U l        [        R                  " [        UR                  5       Vs/ sH  n[        U5      PM     sn5      U l        SU l        U R%                  5         gs  snf )  
word_embeddings (`torch.nn.Embeddings` of shape `(config.vocab_size, config.hidden_size)`, *optional*):
    The word embedding parameters. This can be used to initialize [`ProphetNetEncoder`] with pre-defined word
    embeddings instead of randomly initialized word embeddings.
Nr   F)r   r   r   r   
vocab_sizer   r   ry  r   position_embeddingsr   embeddings_layer_norm
ModuleListr-   num_encoder_layersrV  r   gradient_checkpointing	post_init)rm   r   ry  _r   s       r"   r   ProphetNetEncoder.__init__  s     	  * f//1C1CQWQdQde 	
 $B&#I %.v/A/A%B"mmUSYSlSlMm$nMm%;F%CMm$no&+#	 %os    Cc                     U R                   $ r   ry  rl   s    r"   get_input_embeddings&ProphetNetEncoder.get_input_embeddings      ###r#   c                     Xl         g r   r  rm   values     r"   set_input_embeddings&ProphetNetEncoder.set_input_embeddings      $r#   r   r   	head_maskinputs_embedsr   output_hidden_statesreturn_dictr   c                     Ub  UOU R                   R                  nUb  UOU R                   R                  nUb  UOU R                   R                  nUc  Uc  [	        S5      eUb  Ub  [	        S5      eUb  Uc  U R                  U5      nUb}  SUSS2SSSS24   R                  SU R                   R                  SS5      -
  [        R                  " U R                  5      R                  -  nUR                  UR                  5      nOSnU R                  UR                  SS UR                  5      u  pXI-   nU R!                  U5      n["        R$                  R'                  XR                   R&                  U R(                  S9nU(       a  SOSnU(       a  SOSnUb\  UR+                  5       S	   [-        U R.                  5      :X  d2   S
[-        U R.                  5       SUR+                  5       S	    S35       e[1        U R.                  5       H8  u  pU(       a  X4-   nU" UUUb  X>   OSUS9nUS	   nU(       d  M/  UUS   4-   nM:     U(       a  X4-   nU(       d  [3        S XU4 5       5      $ [5        XUS9$ )a  
Example:

```python
>>> from transformers import AutoTokenizer, ProphetNetEncoder
>>> import torch

>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = ProphetNetEncoder.from_pretrained("patrickvonplaten/prophetnet-large-uncased-standalone")
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)

>>> last_hidden_states = outputs.last_hidden_state
```Nz3Either input_ids or inputs_embeds has to be passed.z2Make sure to only pass input_ids or inputs_embeds.      ?r   r'   r   rq   r   z&The head_mask should be specified for  layers, but it is for .)r   r   r   c              3   ,   #    U H  oc  M  Uv   M     g 7fr   rq   .0vs     r"   	<genexpr>,ProphetNetEncoder.forward.<locals>.<genexpr>h  s     l$Zq$Z   	)r   r   r   )r   r   r  use_return_dictr   ry  rN   rX  r   r)   r   r*   r,  r~  r   r%   r  r   r   r   r   rO   lenr   	enumeratery   r   )rm   r   r   r  r  r   r  r  extended_attention_maskr~  rP   r   rd   all_attentionsidxencoder_layerlayer_outputss                    r"   r   ProphetNetEncoder.forward  s   4 2C1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B]!6RSS"}'@QRR"}'< 00;M %nQdA%56==aAhAhjkmnooDJJ'++',# '>&@&@ATAT&U#&*#,0,D,D]EXEXY[Z[E\^k^r^r,s)%;22=A--m{{?R?R]a]j]j-k&:0d  >>#A&3t{{+;< 8T[[9I8JJabkbpbpbrstbuavvwx< #,DKK"8C#(=@P(P%)63<3Hd"3	M *!,M  !/=3C2E!E #9   $9<L$L!l]>$Zlll+]k
 	
r#   )r  r  r   r~  ry  r   )NNNNNNN)rr   rs   rt   ru   r   r   r   r   r  r  r   r   r   r   r  r   ry   r   r   r{   r   r   s   @r"   rx  rx    s    / ",,  ,$%  -115,004,0/3&*T
ELL)T
 !.T
 ELL)	T

  -T
 $D>T
 'tnT
 d^T
 
uo%	&T
 T
r#   rx  z=
    The standalone decoder part of the ProphetNetModel.
    c                      ^  \ rS rSrSS\S\\R                     4U 4S jjjrS r	S r
\             SS\\R                     S\\R                     S	\\R                     S
\\R                     S\\R                     S\\R                     S\\\\R                           S\\R                     S\\   S\\   S\\   S\\   S\\R                     S\\\4   4S jj5       rS rS rS rSrU =r$ )ProphetNetDecoderin  r   ry  c           
        > [         TU ]  U5        UR                  U l        UR                  U l        UR                  U l        UR
                  U l        UR                  U l        Ub  UO3[        R                  " UR                  UR                  UR                  S9U l        [        U5      U l        [        R                  " U R                  UR                  S5      U l        [        R"                  " [%        UR&                  5       Vs/ sH  n[)        XS9PM     sn5      U l        [-        UR                  5      U l        SU l        U R3                  5         gs  snf )r{  Nr|  rh  F)r   r   r2   rA   r  r   r   max_target_positionsr   r   r}  r   r   ry  r   r~  ngram_embeddingsr  r-   num_decoder_layersrf  r   r   r  r  r  )rm   r   ry  ir   s       r"   r   ProphetNetDecoder.__init__t  s    	 \\
!--%+%A%A"~~$*$B$B! * f//1C1CQWQdQde 	
 $B&#I  "TZZ9K9KT RmmBGHaHaBbcBbQ#F8Bbc
 &/v/A/A%B"&+# ds   Ec                     U R                   $ r   r  rl   s    r"   r  &ProphetNetDecoder.get_input_embeddings  r  r#   c                     Xl         g r   r  r  s     r"   r  &ProphetNetDecoder.set_input_embeddings  r  r#   r   r   rd   encoder_attention_maskr  cross_attn_head_maskr]   r  rn  r   r  r  r   r   c                    U	b  U	OU R                   R                  n	U
b  U
OU R                   R                  n
Ub  UOU R                   R                  nUb  UOU R                   R                  nUc  Uc  [        S5      eUb  Ub  [        S5      eUb  Uc  U R                  U5      nUR                  SS u  pU R                  (       a/  U R                  (       a  U	(       a  [        R                  S5        Sn	SnU	(       aB  [        U[        5      (       d-  [        R                  S5        Sn[        R                  " U5      nUb  UR!                  5       OS	nU R#                  X4UR$                  US
9u  nnUS	:w  a  Su  nnOU R'                  U5      u  nnU R"                  R)                  US-   5      nUU-   nU R*                  R,                  nUS	:w  a_  UR/                  S5      S:X  d   S5       e[1        U R2                  5       Vs/ sH  nUUS-
     U-   R5                  USS5      PM!     nnSnSnOQ[1        U R2                  5       Vs/ sH  nUUS-
     U-   PM     nnU R7                  UU5      nU R9                  UU5      nUb}  SUSS2SSSS24   R5                  SU R                   R:                  SS5      -
  [<        R>                  " U R@                  5      RB                  -  nURE                  UR@                  5      nOSn[<        RF                  " U/U-   S5      nU RH                  (       a  U RI                  U5      n[J        RL                  RO                  UU RN                  U R                  S9nU(       a  SOSnU(       a  U R                   R2                  S	:  a  SOSnU
(       a  SOSn U
(       a  SOSn!U
(       a  U R                   RP                  (       a  SOSn"[S        XV/SS/5       Hj  u  n#n$U#c  M  U#R/                  5       S	   [U        U RV                  5      :X  a  M7   SU$ S[U        U RV                  5       SUR/                  5       S	    S35       e   [Y        U RV                  5       H  u  n%n&U(       a8  UUSS2SU24   4-  nU R                   R2                  S	:  a  UUSS2US24   4-  nU&" UUUUUb  UU%   OSUb  UU%   OSUUUUUU	U
US9n'U'S	   nU
(       d  Mv  U U'S   4-  n U!U'S   4-  n!U R                   RP                  (       d  M  U"U'S   4-  n"M     U(       a8  UUSS2SU24   4-  nU R                   R2                  S	:  a  UUSS2US24   4-  nU(       a  UR[                  5       nUSS2SU24   n(U R                   R2                  S	:  a  USS2US24   OSn)U(       d  []        S U(U)UUUU U!U"4 5       5      $ [_        U(U)UUUU U!U"S9$ s  snf s  snf )a  
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.

Example:

```python
>>> from transformers import AutoTokenizer, ProphetNetDecoder
>>> import torch

>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = ProphetNetDecoder.from_pretrained("microsoft/prophetnet-large-uncased", add_cross_attention=False)
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)

>>> last_hidden_states = outputs.last_hidden_state
```NzGEither `decoder_input_ids` or `decoder_inputs_embeds` has to be passed.zFMake sure to only pass `decoder_input_ids` or `decoder_inputs_embeds`.r'   zZ`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`...FzPassing a tuple of `past_key_values` is deprecated and will be removed in Transformers v4.58.0. You should pass an instance of `EncoderDecoderCache` instead, e.g. `past_key_values=EncoderDecoderCache.from_legacy_cache(past_key_values)`.Tr   )r%   r]   )NNr   zOAt the moment `use_cache` is only supported for `decoder_input_ids` of length 1r  r   rq   r  r  zThe `z` should be specified for r  r  )rp  r   rq  r-  rS   rT   rP   r   rn  r   r   r	   c              3   .   #    U H  nUc  M  Uv   M     g 7fr   rq   r  s     r"   r  ,ProphetNetDecoder.forward.<locals>.<genexpr>T  s"      	A  	s   	)r   r   r]   r   r   r   r   rb   )0r   rn  r   r  r  r   ry  r   r  r   loggerwarning_oncer   r   r   from_legacy_cacher   r~  r%   !compute_buffered_relative_bucketsr   r  r   rO   r-   r2   rN   prepare_attention_maskprepare_predict_attention_maskr  r   r)   r   r*   r,  r0   r  r   r   r   ri  zipr  r   r  to_legacy_cachery   r   )*rm   r   r   rd   r  r  r  r]   r  rn  r   r  r  r   r   r1   return_legacy_cachepast_key_values_lengthmain_stream_pos_embedrP   rS   rT   predicting_stream_pos_embedr   r  r2   ngram_hidden_statesr  r-  extended_encoder_attention_maskall_main_stream_hidden_statesall_ngram_stream_hidden_statesall_main_stream_attnsall_ngram_stream_attnsall_cross_attns	attn_mask	mask_namer  decoder_layerr  r   r   s*                                             r"   r   ProphetNetDecoder.forward  s   J "+!6IDKK<Q<Q	1B1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B]!6fgg"}'@eff"}'< 00;M&3&9&9"1&=#
&&4==##p "	#Z??\
 #'1CCOTOETE`!?!?!Afg.2.F.F) ''+ /G /
+| "Q&PZM*,M
 66|D.1&*&>&>&G&GWXHX&Y# &(==0077 "Q& %%a(A- a- #4::.#.E "%!),/JJRRS]_`bcd.   # '+#.2+ Z__c_i_iYj#YjPU!%!),/JJYj   # '+&A&A-Q_&`#.2.Q.QR_ao.p+ "-,QdA-=>EEaIpIprsuvwwDJJ'++/,+ /N.P.PQ^QdQd.e+.2+		=/4G"GK%% 66}EM--mt||VZVcVc-d /C%/CHYHY\]H]cg&&7T'8d 1dkk6U6U"[_ %((IKYoKp$q Iy$ ~~'*s4;;/?@ I;&@T[[AQ@R S!(+,A/@ %r #,DKK"8C#--CSOCS@S2T1VV-;;$$q(2}QHXEX7Y6[[2)'%"A3<3H3dI]Ii,@,Eos0O/M2S).#"3-M" *!,M  %-*:)<<%&=+;*==&;;222#a(8'::O= #9@  )mA?O?O<O.P-RR){{  1$.=ODTAT3U2WW.-==?O *!-=o-=*=>HLHYHY\]H]-?3C0C"Dcg  &+#12)*#	   ,/$;+7 >,3,	
 		
O##s   %W W%c           	         UR                   u  p#[        R                  " SU R                  5      R	                  UR
                  5      R                  SS5      n[        U R                  U R                  U5      u  pEUS S 2S U2S U24   R                  USS5      n[        R                  " US S 2S U2S U24   US S 2S U2U R                  U R                  U-   24   /S5      R                  USS5      nXE4$ r  )r   r   rN  r  r,  r%   rN   rU   rA   r  r0   )rm   rP   r   r1   main_relative_bucketspredict_relative_bucketss         r"   r  3ProphetNetDecoder.compute_buffered_relative_bucketsm  s   &2&8&8#
||At'@'@ADD\EXEXY``abdef:]d88,;
7
 !6a9I/9IK[OK[6[ \ c cdnpqst u#(99(,<_,<>N>N)NO('')B)BTE^E^apEp)pp $
 &Q
" 	! %>>r#   c                 H   UR                   S S u  p4[        R                  " XD4[        R                  " UR                  5      R
                  UR                  UR                  S9n[        R                  " US5      nUS U2S U24   S S S S 2S S 24   R                  X0R                  R                  4UR                   -   5      nUb@  SUS S 2S S S S 24   -
  [        R                  " U R                  5      R
                  -  nXg-   nOUnUR                  UR                  5      $ )Nr'   r   r   r  )r   r   fullr)   r   r*   r%   triuexpandr   r  r,  )rm   r   r   r   
seq_lengthcausal_maskextended_causal_maskr  s           r"   r  (ProphetNetDecoder.prepare_attention_mask  s   !.!4!4Ra!8
 jj$KK++,00%% ''	
 jja0*;J;+CDT4QRTUEUV]]@@AKDUDUU 

 %'*^AtT1<L-M'MQVQ\Q\]a]g]gQhQlQl&l#&:&T#&:#&))-*=*=>>r#   c           	         UR                   S S u  p4[        U R                  U R                  UR                  UR
                  5      n[        R                  " US S 2S U2S U24   US S 2S U2U R                  U R                  U-   24   /SS9nUS S S S 2S S 2S S 24   R                  X0R                  R                  4UR                   -   5      nUb  SUS S 2S S S S S 24   -
  [        R                  " U R
                  5      R                  -  nUR                  X0R                  R                  U R                  XD45      n[        R                  " U[        R                  " U5      /SS9nXg-   nOUnUR                  UR
                  5      $ )Nr'   rL   r   r  )r   r6   r  r2   r%   r   r   r0   r  r   r  r)   r*   r9   r,  )	rm   r   r   r   r  predict_causal_maskextended_predict_causal_maskr  r-  s	            r"   r  0ProphetNetDecoder.prepare_predict_attention_mask  s   !.!4!4Ra!8
 3%%tzz=3G3GI\I\
 $ii#A{
{KZK$?@#{
{D$=$=@Y@Y\f@f$ff 
 (;4q!Q;N'O'V'V@@ADWD]D]](
$
 %'*^AtT4QR<R-S'SW\WbWbcgcmcmWnWrWr&r#&=&D&D[[DDdjjR\i'# ',ii(%*:*:;R*STZ\'# /K.d+.J+.11-2E2EFFr#   )r   r  r  r   r  r2   r  rA   r~  r  ry  r   NNNNNNNNNNNNN)rr   rs   rt   ru   r   r   r   r   r   r  r  r   r   r   ry   r  r   r   r   r  r  r  r{   r   r   s   @r"   r  r  n  s   / (2<<BX  >$%  -1158<9=,07;@D04$(,0/3&*15Q
ELL)Q
 !.Q
  (5	Q

 !) 6Q
 ELL)Q
 'u||4Q
 "%ell(;"<=Q
  -Q
 D>Q
 $D>Q
 'tnQ
 d^Q
 !.Q
 
u22	3Q
 Q
f?,?0!G !Gr#   r  c            &       $  ^  \ rS rSrSS/rS\4U 4S jjrS rS rS r	S	 r
S
 r\                SS\\R                     S\\R                     S\\R                     S\\R                      S\\R                     S\\R                     S\\R                     S\\   S\\\\R                           S\\R                     S\\R                     S\\   S\\   S\\   S\\   S\\R                     S\\\4   4"S jj5       rSrU =r$ )ProphetNetModeli  encoder.word_embeddings.weightdecoder.word_embeddings.weightr   c                   > [         TU ]  U5        [        R                  " UR                  UR
                  UR                  S9U l        [        R                  " U5      nSUl
        SUl        [        X R                  5      U l        [        R                  " U5      nSUl        SUl        [        X0R                  5      U l        U R#                  5         g )Nr|  FT)r   r   r   r   r}  r   r   ry  copydeepcopyrn  tie_encoder_decoderrx  encoder
is_decoderr  decoderr  )rm   r   encoder_configdecoder_configr   s       r"   r   ProphetNetModel.__init__  s     !||F,=,=v?Q?Q_e_r_rsv.#( -2*(9M9MNv.$(!-2*(9M9MN 	r#   c                     U R                   $ r   r  rl   s    r"   r  $ProphetNetModel.get_input_embeddings  r  r#   c                 |    Xl         U R                   U R                  l         U R                   U R                  l         g r   )ry  r  r  r  s     r"   r  $ProphetNetModel.set_input_embeddings  s,    $'+';';$'+';';$r#   c                     U R                   R                  (       aa  U R                  U R                  R                  U R                  5        U R                  U R
                  R                  U R                  5        g g r   )r   tie_word_embeddings_tie_or_clone_weightsr  ry  r  rl   s    r"   _tie_weightsProphetNetModel._tie_weights  sT    ;;**&&t||'C'CTEYEYZ&&t||'C'CTEYEYZ +r#   c                     U R                   $ r   )r  rl   s    r"   get_encoderProphetNetModel.get_encoder      ||r#   c                     U R                   $ r   r  rl   s    r"   get_decoderProphetNetModel.get_decoder  r  r#   r   r   decoder_input_idsdecoder_attention_maskr  decoder_head_maskr  encoder_outputsr]   r  decoder_inputs_embedsrn  r   r  r  r   r   c                 `   Ub  UOU R                   R                  nUb  UOU R                   R                  nUb  UOU R                   R                  nUb  UOU R                   R                  nUc  U R                  UUUU
UUUS9nU R                  UUUS   UUUU	UUUUUUS9nU(       d  UU-   $ [        UR                  UR                  UR                  UR                  UR                  UR                  UR                  UR                  UR                  UR                  UR                  S9$ )aW  
decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Indices of decoder input sequence tokens in the vocabulary.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are decoder input IDs?](../glossary#decoder-input-ids)

    ProphetNet uses the `eos_token_id` as the starting token for `decoder_input_ids` generation. If
    `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
    `past_key_values`).
decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
    be used by default.
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.

Example:

```python
>>> from transformers import AutoTokenizer, ProphetNetModel

>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = ProphetNetModel.from_pretrained("microsoft/prophetnet-large-uncased")

>>> input_ids = tokenizer(
...     "Studies have been shown that owning a dog is good for you", return_tensors="pt"
... ).input_ids  # Batch size 1
>>> decoder_input_ids = tokenizer("Studies show that", return_tensors="pt").input_ids  # Batch size 1
>>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)

>>> last_hidden_states = outputs.last_hidden_state  # main stream hidden states
>>> last_hidden_states_ngram = outputs.last_hidden_state_ngram  # predict hidden states
```)r   r   r  r  r   r  r  r   )r   r   rd   r  r  r  r]   r  r   r  rn  r  r   )r   r   r]   r^   r_   r`   ra   rb   rc   rd   re   )r   rn  r   r  r  r  r  r}   r   r   r]   r   r   r   r   rb   )rm   r   r   r  r  r  r  r  r  r]   r  r  rn  r   r  r  r   decoder_outputss                     r"   r   ProphetNetModel.forward  sW   t "+!6IDKK<Q<Q	1B1N-TXT_T_TqTq$8$D $++JjJj 	 &1%<k$++B]B]""ll#-#+"3%9' + O ,,'1"1!"4#1'!5+//!5#) ' 
  "_44+-??$3$K$K+;;"1"?"?(7(K(K.99%4%E%E,==&5&G&G"1"?"?.99
 	
r#   )r  r  ry  )NNNNNNNNNNNNNNNN)rr   rs   rt   ru   _tied_weights_keysr   r   r  r  r  r  r  r   r   r   r   
BoolTensorry   r  r   r}   r   r{   r   r   s   @r"   r  r    s   :<\]/ "$<
[
  -11548=A,0487;+/@D048<$(,0/3&*15#j
ELL)j
 !.j
 $ELL1	j

 !))9)9 :j
 ELL)j
 $ELL1j
 'u||4j
 "%j
 "%ell(;"<=j
  -j
  (5j
 D>j
 $D>j
 'tnj
  d^!j
" !.#j
$ 
u22	3%j
 j
r#   r  zh
    The ProphetNet Model with a language modeling head. Can be used for sequence generation tasks.
    c            (       ~  ^  \ rS rSr/ SQrS\4U 4S jjrS rS r\	                 SS\
\R                     S\
\R                     S	\
\R                     S
\
\R                     S\
\R                     S\
\R                     S\
\R                     S\
\R                     S\
\\\R                           S\
\R                     S\
\R                     S\
\R                     S\
\   S\
\   S\
\   S\
\   S\
\R                     S\\\4   4$S jj5       rS S jrS\R                  4S jrS rS rSrU =r$ )!"ProphetNetForConditionalGenerationiU  )r  r  lm_head.weightr   c                   > [         TU ]  U5        [        U5      U l        UR                  U l        UR                  U l        [        R                  " UR                  UR                  SS9U l        U R                  5         g )NFr   )r   r   r  r   r   r   disable_ngram_lossr   r   r   r}  lm_headr  r   s     r"   r   +ProphetNetForConditionalGeneration.__init__]  sd     )&1!.."(";";yy!3!3V5F5FUS 	r#   c                     U R                   R                  (       a1  U R                  U R                  R                  U R
                  5        g g r   )r   r  r  r   ry  r  rl   s    r"   r  /ProphetNetForConditionalGeneration._tie_weightsh  s2    ;;**&&t'F'FU +r#   c                 .    U R                   R                  $ r   )r   ry  rl   s    r"   r  7ProphetNetForConditionalGeneration.get_input_embeddingsl  s    ...r#   r   r   r  r  r  r  r  r  r]   r  r  labelsrn  r   r  r  r   r   c                 z   Ub  UOU R                   R                  nUb  Uc  Uc  U R                  U5      nU R                  UUUUUUUUU	U
UUUUUUS9nUb  UR                  OUR                  SS u  nnUS   R                  UU R                   R                  US5      nU R                  U5      nUSS2S4   nU R                   R                  S:  a  USS2SS24   OSnUR                  5       (       d  UR                  5       nSnUb  U R                  UU5      nU(       d+  [        S UU4 5       5      nUb  U4U-   USS -   $ UUSS -   $ [        UUUUR                  UR                  UR                  UR                   UR"                  UR$                  UR&                  UR(                  UR*                  S9$ )	a  
decoder_input_ids (`torch.LongTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Indices of decoder input sequence tokens in the vocabulary.

    Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
    [`PreTrainedTokenizer.__call__`] for details.

    [What are decoder input IDs?](../glossary#decoder-input-ids)

    ProphetNet uses the `eos_token_id` as the starting token for `decoder_input_ids` generation. If
    `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
    `past_key_values`).
decoder_attention_mask (`torch.BoolTensor` of shape `(batch_size, target_sequence_length)`, *optional*):
    Default behavior: generate a tensor that ignores pad tokens in `decoder_input_ids`. Causal mask will also
    be used by default.
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
    Labels for computing the sequence classification/regression loss. Indices should be in `[-100, 0, ...,
    config.vocab_size - 1]`. All labels set to `-100` are ignored (masked), the loss is only computed for
    labels in `[0, ..., config.vocab_size]`

Example:

```python
>>> from transformers import AutoTokenizer, ProphetNetForConditionalGeneration

>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = ProphetNetForConditionalGeneration.from_pretrained("microsoft/prophetnet-large-uncased")

>>> input_ids = tokenizer(
...     "Studies have been shown that owning a dog is good for you", return_tensors="pt"
... ).input_ids  # Batch size 1
>>> decoder_input_ids = tokenizer("Studies show that", return_tensors="pt").input_ids  # Batch size 1
>>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids)

>>> logits_next_token = outputs.logits  # logits to predict next token as usual
>>> logits_ngram_next_tokens = outputs.logits_ngram  # logits to predict 2nd, 3rd, ... next tokens
```N)r   r   r  r  r  r  r  r  r]   r  r  rn  r   r  r  r   r'   r   rL   r   c              3   ,   #    U H  oc  M  Uv   M     g 7fr   rq   r  s     r"   r  =ProphetNetForConditionalGeneration.forward.<locals>.<genexpr>       R*@Qqq*@r  )rZ   r[   r\   r]   r^   r_   r`   ra   rb   rc   rd   re   )r   r  r   r   r   r   r2   r  is_contiguousr  _compute_lossry   rX   r]   r^   r_   r`   ra   rb   rc   rd   re   )rm   r   r   r  r  r  r  r  r  r]   r  r  r  rn  r   r  r  r   rb  r   r1   predicting_streamspredict_logitsr[   r\   rZ   
all_logitss                              r"   r   *ProphetNetForConditionalGeneration.forwardo  s   ~ &1%<k$++B]B]"3";@U@] $ 1 1& 9//)/#9/!5++'"7/!5#)! " 
& (9'D##J_JeJefhghJi 	$
O %QZ__Z9J9JO]_`&891%040A0AA0E~ae,4 ##%%&&(F%%nf=DR6<*@RRJ9=9ID7Z''!"+5gz\cdedf\gOgg,) ' 7 7&-&C&C,3,O,O#*#=#=)0)I)I!(!9!9*1*K*K&-&C&C#*#=#= r#   c                    UR                  U R                  R                  UR                  S5      UR                  S5      5      R	                  U5      n[        U R                  R                  5       H'  nUS:  a  U R                  (       a    OX$US S 2S S 24'   M)     UR                  SS5      R                  5       n[        R                  R                  UR                  SUR                  S5      5      S[        R                  S9n[        R                  R                  XdR                  S5      SS9nU R                  R                   S:  a  UR#                  SSS	9* nUR%                  U5      R                  S5      n	X   nUR'                  5       nU R                  R                   UR                  S5      -  n
S
U R                  R                   -
  U-  X-  -   nU$ Nr   r   rL   r   r   )	reductionr   T)r   keepdimr  r   r   r2   rO   fill_r-   r  r   r  r   r   log_softmaxr   r   r   nll_lossepssumner   rm   r[   r  ignore_indexexpend_targetsr  lprobsrZ   smooth_lossnon_masked_tokenseps_is              r"   r  0ProphetNetForConditionalGeneration._compute_loss     ))$++*;*;V[[^V[[YZ^\bbcopt{{(()A1u00&,1a7# *
 !!!Q'224**KKFKKO,-- + 
 }}%%f.A.A".EQW%X;;??S !::"d:;;K . 1 1, ? D DR H%8K%**,KKKOOfkk"o5E$++//)T1E4GGDr#   c                 $    U R                  U5      $ r   )r   )rm   r  s     r"   %prepare_decoder_input_ids_from_labelsHProphetNetForConditionalGeneration.prepare_decoder_input_ids_from_labels  s      ((r#   c                 .    U R                   R                  $ r   )r   r  rl   s    r"   r  .ProphetNetForConditionalGeneration.get_encoder
      &&&r#   c                 .    U R                   R                  $ r   r   r  rl   s    r"   r  .ProphetNetForConditionalGeneration.get_decoder  r;  r#   r  r  r   r   )NNNNNNNNNNNNNNNNNr   )rr   rs   rt   ru   r	  r   r   r  r  r   r   r   r   r
  ry   r  r   rX   r   r  r7  r  r  r{   r   r   s   @r"   r  r  U  s    p	/ 	V/  -11548=A,0487;26@D048<)-$(,0/3&*15%yELL)y !.y $ELL1	y
 !))9)9 :y ELL)y $ELL1y 'u||4y "%,,/y "%ell(;"<=y  -y  (5y &y D>y $D>y  'tn!y" d^#y$ !.%y& 
u//	0'y yv8)ELL )'' 'r#   r  zt
    The standalone decoder part of the ProphetNetModel with a lm head on top. The model can be used for causal
    c                      ^  \ rS rSr/ SQrS\4U 4S jjrS rS rS r	S r
S	 r\             SS
\\R                     S\\R                     S\\R                     S\\R                     S\\R                     S\\R                     S\\\\R                           S\\R                     S\\R                     S\\   S\\   S\\   S\\   S\\\4   4S jj5       rSS jr    SS jrSrU =r$ )ProphetNetForCausalLMi  )z!prophetnet.word_embeddings.weightz)prophetnet.decoder.word_embeddings.weightr  r   c                 N  > [         R                  " U5      nSUl        SUl        [        TU ]  U5        [        U5      U l        UR                  U l	        UR                  U l
        [        R                  " UR                  UR                  SS9U l        U R!                  5         g )NTFr  )r  r  r  is_encoder_decoderr   r   ProphetNetDecoderWrapperr   r   r   r  r   r   r   r}  r  r  r   s     r"   r   ProphetNetForCausalLM.__init__  s    v& $)! 26:!.."(";";yy!3!3V5F5FUS 	r#   c                 B    U R                   R                  R                  $ r   r   r  ry  rl   s    r"   r  *ProphetNetForCausalLM.get_input_embeddings-  s    &&666r#   c                 8    XR                   R                  l        g r   rH  r  s     r"   r  *ProphetNetForCausalLM.set_input_embeddings0  s    27/r#   c                     U R                   R                  (       a;  U R                  U R                  R                  R
                  U R                  5        g g r   )r   r  r  r   r  ry  r  rl   s    r"   r  "ProphetNetForCausalLM._tie_weights3  s;    ;;**&&t'>'>'N'NPTP\P\] +r#   c                 $    XR                   l        g r   r=  )rm   r  s     r"   set_decoder!ProphetNetForCausalLM.set_decoder7  s    ")r#   c                 .    U R                   R                  $ r   r=  rl   s    r"   r  !ProphetNetForCausalLM.get_decoder:  r;  r#   r   r   rd   r  r  r  r]   r  r  rn  r   r  r  r   c                    Ub  UOU R                   R                  nU R                  R                  UUUUUUUUU
UUUS9nUb  UR                  OUR                  SS u  nnUS   R                  XR                   R                  US5      nU R                  U5      nUSS2S4   nU R                   R                  S:  a  USS2SS24   OSnSnU	b  U R                  UU	5      nU(       d+  [        S UU4 5       5      nUb  U4U-   USS -   $ UUSS -   $ [        UUUUR                  UR                  UR                  UR                  UR                  UR                   S9	$ )	a  
cross_attn_head_mask (`torch.Tensor` of shape `(decoder_layers, decoder_attention_heads)`, *optional*):
    Mask to nullify selected heads of the cross-attention modules. Mask values selected in `[0, 1]`:

    - 1 indicates the head is **not masked**,
    - 0 indicates the head is **masked**.
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
    Labels for computing the left-to-right language modeling loss (next word prediction). Indices should be in
    `[-100, 0, ..., config.vocab_size]` (see `input_ids` docstring) Tokens with indices set to `-100` are
    ignored (masked), the loss is only computed for the tokens with labels n `[0, ..., config.vocab_size]`

Example:

```python
>>> from transformers import AutoTokenizer, ProphetNetForCausalLM
>>> import torch

>>> tokenizer = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = ProphetNetForCausalLM.from_pretrained("microsoft/prophetnet-large-uncased")
>>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)

>>> logits = outputs.logits

>>> # Model can also be used with EncoderDecoder framework
>>> from transformers import BertTokenizer, EncoderDecoderModel, AutoTokenizer
>>> import torch

>>> tokenizer_enc = BertTokenizer.from_pretrained("google-bert/bert-large-uncased")
>>> tokenizer_dec = AutoTokenizer.from_pretrained("microsoft/prophetnet-large-uncased")
>>> model = EncoderDecoderModel.from_encoder_decoder_pretrained(
...     "google-bert/bert-large-uncased", "microsoft/prophetnet-large-uncased"
... )

>>> ARTICLE = (
...     "the us state department said wednesday it had received no "
...     "formal word from bolivia that it was expelling the us ambassador there "
...     "but said the charges made against him are `` baseless ."
... )
>>> input_ids = tokenizer_enc(ARTICLE, return_tensors="pt").input_ids
>>> labels = tokenizer_dec(
...     "us rejects charges against its ambassador in bolivia", return_tensors="pt"
... ).input_ids
>>> outputs = model(input_ids=input_ids, decoder_input_ids=labels[:, :-1], labels=labels[:, 1:])

>>> loss = outputs.loss
```N)r   r   rd   r  r  r  r]   r  rn  r   r  r  r'   r   rL   r   c              3   ,   #    U H  oc  M  Uv   M     g 7fr   rq   r  s     r"   r  0ProphetNetForCausalLM.forward.<locals>.<genexpr>  r  r  )	rZ   r[   r\   r]   r   r   r   r   rb   )r   r  r   r  r   r   r2   r  r  ry   r   r]   r   r   r   r   rb   )rm   r   r   rd   r  r  r  r]   r  r  rn  r   r  r  rb  r   r1   r  r  r[   r\   rZ   r   s                          r"   r   ProphetNetForCausalLM.forward=  s   B &1%<k$++B]B] //)))"7#9!5+'/!5# * 
 :C9NiooTaTgTghjijTk#
O$QZ__Z9J9JO]_`&891%040A0AA0E~ae,4%%nf=DR6<*@RRJ9=9ID7Z''!"+5gz\cdedf\gOgg,) ' 7 7%33$+$?$?"--!(!9!9!(!9!9
 
r#   c                    UR                  U R                  R                  UR                  S5      UR                  S5      5      R	                  U5      n[        U R                  R                  5       H'  nUS:  a  U R                  (       a    OX$US S 2S S 24'   M)     UR                  SS5      R                  5       n[        R                  R                  UR                  SUR                  S5      5      S[        R                  S9n[        R                  R                  XdR                  S5      SS9nU R                  R                   S:  a  UR#                  SSS	9* nUR%                  U5      R                  S5      n	X   nUR'                  5       nU R                  R                   UR                  S5      -  n
S
U R                  R                   -
  U-  X-  -   nU$ r#  r&  r-  s              r"   r  #ProphetNetForCausalLM._compute_loss  r5  r#   c                 r    Uc  UR                  UR                  5      nU(       a  US S 2SS 24   nUUUUUS.$ )NrL   )r   r   r  r]   rn  )new_onesr   )rm   r   r]   r   r  rn  kwargss          r"   prepare_inputs_for_generation3ProphetNetForCausalLM.prepare_inputs_for_generation  sL     !&//	@N!!RS&)I #,"."
 	
r#   r?  r  r@  )NNNN)rr   rs   rt   ru   r	  r   r   r  r  r  rO  r  r   r   r   r   ry   r  r   r   r   r  r\  r{   r   r   s   @r"   rB  rB    s   /  78^*'  -1158<9=,07;@D04)-$(,0/3&*lELL)l !.l  (5	l
 !) 6l ELL)l 'u||4l "%ell(;"<=l  -l &l D>l $D>l 'tnl d^l 
u//	0l l\> 
 
r#   rB  c                   @   ^  \ rS rSrSrS\4U 4S jjrS rS rSr	U =r
$ )rE  i  zx
This is a wrapper class, so that [`ProphetNetForCausalLM`] can correctly be loaded from pretrained prophetnet
classes.
r   c                    > [         TU ]  U5        [        R                  " UR                  UR
                  UR                  S9U l        [        XR                  S9U l	        U R                  5         g )Nr|  r  )r   r   r   r   r}  r   r   ry  r  r  r  r   s     r"   r   !ProphetNetDecoderWrapper.__init__  sV     !||F,=,=v?Q?Q_e_r_rs(AUAUV 	r#   c                 l    U R                  U R                  U R                  R                  5       5        g r   )r  ry  r  r  rl   s    r"   r  %ProphetNetDecoderWrapper._tie_weights  s%    ""4#7#79Z9Z9\]r#   c                 &    U R                   " U0 UD6$ r   r  )rm   argsr[  s      r"   r    ProphetNetDecoderWrapper.forward  s    ||T,V,,r#   )r  ry  )rr   rs   rt   ru   rv   r   r   r  r   r{   r   r   s   @r"   rE  rE    s%    
/ ^- -r#   rE  )r  rx  rB  r  r  r   rd  )=rv   r  r>   ri   dataclassesr   typingr   r   r   torch.utils.checkpointr   r   torch.nnr   activationsr
   cache_utilsr   r   
generationr   modeling_layersr   modeling_outputsr   modeling_utilsr   utilsr   r   r   configuration_prophetnetr   
get_loggerrr   r  r   r6   rJ   rU   rX   r}   r   r   r   r   r   Moduler   r  r  rV  rf  rx  r  r  r  rB  rE  __all__rq   r#   r"   <module>ru     s   Y    ! "     ! 5 ) 9 / - 9 9 6 
		H	%Q7" 6M. 
4% 4% 4%n 2%; 2% 2%j 
$@; $@ $@N 
,@ ,@ ,@^ #! #! #!L(-R\\ (-VB2")) B2JBII ./299 /D
(7 (VH7 HV 
r
1 r

r
j 
IG1 IG
IGX
 R
/ R
 R
j 
t')BO t'
t'n 
J
5 J

J
Z-8 -,r#   