
from dataclasses import dataclass
from typing import Callable, Optional, Union

import torch
from torch import nn

from ...activations import ACT2FN
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutput, ImageClassifierOutput
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...utils import ModelOutput, auto_docstring, can_return_tuple, logging
from .configuration_vjepa2 import VJEPA2Config


logger = logging.get_logger(__name__)


@dataclass
@auto_docstring(
    custom_intro="""
    VJEPA Predictor outputs that also contains the masked encoder outputs
    """
)
class VJEPA2WithMaskedInputPredictorOutput(ModelOutput):
    r"""
    masked_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*, returned when `context_mask` is provided which is applied on VJEPA2Encoder outputs):
        The masked hidden state of the model.
    target_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*, returned when `target_mask` is provided which is applied on VJEPA2Encoder outputs):
        The target hidden state of the model.
    """

    last_hidden_state: torch.FloatTensor
    masked_hidden_state: Optional[torch.FloatTensor] = None
    hidden_states: Optional[tuple[torch.FloatTensor, ...]] = None
    attentions: Optional[tuple[torch.FloatTensor, ...]] = None
    target_hidden_state: Optional[torch.FloatTensor] = None

@dataclass
@auto_docstring(
    custom_intro="""
    VJEPA outputs that also contains the masked encoder outputs
    Optionally contains the predictor outputs
    """
)
class VJEPA2WithMaskedInputModelOutput(ModelOutput):
    r"""
    masked_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*, returned when `context_mask` is provided which is applied on VJEPA2Encoder outputs):
        The masked hidden state of the model.
    predictor_output (`VJEPA2WithMaskedInputPredictorOutput`, *optional*):
        The output from the Predictor module.
    """

    last_hidden_state: torch.FloatTensor
    masked_hidden_state: Optional[torch.FloatTensor] = None
    hidden_states: Optional[tuple[torch.FloatTensor, ...]] = None
    attentions: Optional[tuple[torch.FloatTensor, ...]] = None
    predictor_output: Optional[VJEPA2WithMaskedInputPredictorOutput] = None

    def to_tuple(self):
        output = list(super().to_tuple())
        if isinstance(output[-1], VJEPA2WithMaskedInputPredictorOutput):
            output[-1] = output[-1].to_tuple()
        return tuple(output)

class VJEPA2PatchEmbeddings3D(nn.Module):
    """
    Image to Patch Embedding
    """

    def __init__(
        self,
        config: VJEPA2Config,
        hidden_size: int = 1024,
    ):
        super().__init__()
        self.patch_size = config.patch_size
        self.tubelet_size = config.tubelet_size
        self.hidden_size = hidden_size

        self.proj = nn.Conv3d(
            in_channels=config.in_chans,
            out_channels=hidden_size,
            kernel_size=(config.tubelet_size, config.patch_size, config.patch_size),
            stride=(config.tubelet_size, config.patch_size, config.patch_size),
        )

    @staticmethod
    def num_patches(config):
        return (
            (config.frames_per_clip // config.tubelet_size)
            * (config.crop_size // config.patch_size)
            * (config.crop_size // config.patch_size)
        )

    def forward(self, pixel_values_videos: torch.Tensor) -> torch.Tensor:
        x = self.proj(pixel_values_videos).flatten(2).transpose(1, 2)
        return x

class VJEPA2Embeddings(nn.Module):
    """
    Construct mask token, position and patch embeddings.
    """

    def __init__(self, config: VJEPA2Config, hidden_size: int = 1024):
        super().__init__()

        self.config = config
        self.hidden_size = hidden_size
        self.patch_embeddings = VJEPA2PatchEmbeddings3D(config, hidden_size=hidden_size)

        self.num_patches = self.patch_embeddings.num_patches
        self.patch_size = config.patch_size

    def forward(self, pixel_values_videos: torch.Tensor) -> torch.Tensor:
        num_frames = pixel_values_videos.shape[1]

        # Swap `frames` and `channels` dims, the result is:
        # (batch_size, channels, num_frames, height, width)
        pixel_values_videos = pixel_values_videos.permute(0, 2, 1, 3, 4)

        # If the clip is shorter than one tubelet, duplicate frames so the 3D convolution
        # has enough temporal extent.
        if num_frames < self.config.tubelet_size:
            pixel_values_videos = pixel_values_videos.repeat(1, 1, self.config.tubelet_size, 1, 1)

        target_dtype = self.patch_embeddings.proj.weight.dtype
        pixel_values_videos = pixel_values_videos.to(dtype=target_dtype)
        embeddings = self.patch_embeddings(pixel_values_videos)

        return embeddings


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    # Take the dot product between "query" and "key" to get the raw attention scores.
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling

    # Normalize the attention scores to probabilities.
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)

    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    # Mask heads if we want to
    if attention_mask is not None:
        attn_weights = attn_weights * attention_mask

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


def rotate_queries_or_keys(x, pos):
    B, num_heads, N, D = x.size()

    # Compute the rotation angle for each position and each channel pair.
    omega = torch.arange(D // 2, dtype=x.dtype, device=x.device)
    omega /= D / 2.0
    omega = 1.0 / 10000**omega  # (D/2,)
    freq = pos.unsqueeze(-1) * omega  # (..., N, D/2), outer product

    # Build the sin/cos rotation terms.
    emb_sin = freq.sin()  # (..., N, D/2)
    emb_cos = freq.cos()  # (..., N, D/2)

    emb_sin = emb_sin.squeeze(-1).repeat(1, 1, 1, 2)
    emb_cos = emb_cos.squeeze(-1).repeat(1, 1, 1, 2)

    # Rotate channel pairs: (y1, y2) -> (-y2, y1).
    y = x.unflatten(-1, (-1, 2))
    y1, y2 = y.unbind(dim=-1)
    y = torch.stack((-y2, y1), dim=-1)
    y = y.flatten(-2)
    return (x * emb_cos) + (y * emb_sin)

  ^  \ rS rSr  SS\S\S\4U 4S jjjrS rS rSS jr	S	 r
   SS
\\R                     S\S\\R                     S\\\R                  \R                  4   \\R                     4   4S jjrSrU =r$ )VJEPA2RopeAttention   r>   r?   num_attention_headsc                 b  > [         TU ]  5         Xl        X l        X0l        X#-  S:w  a  [        SU4 SU S35      e[        X#-  5      U l        U R                  U R                  -  U l        [        R                  " X R                  UR                  S9U l        [        R                  " X R                  UR                  S9U l        [        R                  " X R                  UR                  S9U l        [        R                  " X"5      U l        UR                   U l        [        R$                  " U R"                  5      U l        U R                  R(                  U R                  R*                  -  U l        U R                  R.                  U R                  R0                  -  U l        [        SU R                  S-  S-  -  5      U l        [        SU R                  S-  S-  -  5      U l        [        SU R                  S-  S-  -  5      U l        U R                  S-  U l        S	U l        g )
Nr   zThe hidden size z4 is not a multiple of the number of attention heads .biasrX   r         F)r2   rE   r>   r?   r   
ValueErrorr`   attention_head_sizeall_head_sizer   Linearqkv_biasrx   ry   rz   rJ   attention_probs_dropout_probdropout_probDropoutr}   rQ   rF   	grid_sizerP   rG   
grid_depthd_dimh_dimw_dimr|   	is_causal)r5   r>   r?   r   r7   s       r*   rE   VJEPA2RopeAttention.__init__   s    	&#6 ,1"K>"2 3,-Q0 
 $'{'H#I !558P8PPYY{,>,>V__U
99[*<*<6??SYY{,>,>V__U
IIk7	"??zz$"3"34..$++2H2HH++559Q9QQt771<BCD
t771<BCD
t771<BCD
//5r)   c                 N    [        U R                  U R                  -  5      nX-  $ rN   )r`   r   )r5   idstokens_per_frames      r*   _get_frame_pos"VJEPA2RopeAttention._get_frame_pos   s#    t~~>?&&r)   c                     [        U R                  U R                  -  5      nU R                  U5      nXU-  -
  nU R                  nX-  $ rN   )r`   r   r   )r5   r   r   	frame_idstokens_per_rows        r*   _get_height_pos#VJEPA2RopeAttention._get_height_pos   sI    t~~>?'',	y00$$r)   c                    UR                   nUR                  S5      nUb-  UR                  S5      R                  SU R                  S5      nO[
        R                  " XCS9n[        U R                  U R                  -  5      nU R                  U5      nU R                  nU R                  U5      n	XVU-  -
  X-  -
  n
XyU
4$ )Nr   r   )r   r   r   rp   r   r$   r   r`   r   r   r   )r5   r[   masksr   
token_sizer   r   r   r   
height_ids	width_idss              r*   get_position_ids$VJEPA2RopeAttention.get_position_ids  s    VVAY
 //!$++At/G/GKC,,z9Ct~~>?'',	))#.
 i77>;VV	i//r)   c                    Uu  p4nSn[        USXfU R                  -   24   US9nX`R                  -  n[        USXfU R                  -   24   US9nX`R                  -  n[        USXfU R                  -   24   US9n	X`R                  -  nX`R                  :  a"  USUS 24   n
[
        R                  " XxX/SS9nU$ [
        R                  " XxU	/SS9nU$ )Nr   .)r   r0   r   )r   r   r   r   r   r$   cat)r5   qkpos_idsd_maskh_maskw_masksqkdqkhqkwqkrs              r*   apply_rotary_embeddings+VJEPA2RopeAttention.apply_rotary_embeddings  s    !($RQTZZ-?(?%@fM	ZZ$RQTZZ-?(?%@fM	ZZ$RQTZZ-?(?%@fM	ZZ'''S!"W+CCc/R8B 	 Cc?3B	r)   position_maskoutput_attentions	head_maskrV   c                 8   UR                   u  pVnU R                  U5      R                  USU R                  U R                  5      R                  SS5      nU R                  U5      R                  USU R                  U R                  5      R                  SS5      n	U R                  U5      R                  USU R                  U R                  5      R                  SS5      n
U R                  XS9nU R                  X5      n	U R                  X5      n[        nU R                  R                  S:w  aT  U R                  R                  S:X  a  U(       a  [        R                  S5        O[        U R                  R                     nU" U UU	U
UU R                   U R"                  U R$                  (       d  SOU R&                  S	9u  pUR)                  5       S S
 U R*                  4-   nU R-                  UR/                  U5      5      nU(       a  X4nU$ U4nU$ )Nr0   r   rX   )r   eagersdpa`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`. Falling back to eager attention. This warning can be removed using the argument `attn_implementation="eager"` when loading the model.        r   r|   r}   r   )rn   rx   viewr   r   rZ   ry   rz   r   r   r   r>   _attn_implementationloggerwarning_oncer   r   r|   r   r   r   r   rJ   reshape)r5   r   r   r   r   
batch_size
seq_length_query_layer	key_layervalue_layerr   attention_interfacecontext_layerattention_probsnew_context_layer_shapeoutputss                    r*   r\   VJEPA2RopeAttention.forward)  s    %2$7$7!
JJ}%T*b$":":D<T<TUYq!_ 	 HH]#T*b$":":D<T<TUYq!_ 	 JJ}%T*b$":":D<T<TUYq!_ 	 '''K00D	22;H(?;;++w6{{//69>O##L
 '>dkk>^>^&_#)<nnLL#}}C$2C2C	*
& #0"4"4"6s";t?Q?Q>S"S		-"7"78O"PQ6G=2 O\M]r)   )r   r   r>   r   r}   r   r   r   r   r?   r   ry   r   rJ   rx   r|   rz   r   )r_      rN   )NFN)r   r    r!   r"   r   r`   rE   r   r   r   r   r   r$   rb   boolr   r'   r\   r(   r9   r:   s   @r*   r   r      s      #%	## # !	# #J'%0*( 15"',06  -6  	6
 ELL)6 
uU\\5<</0%2EE	F6 6r)   r   input	drop_probr   rV   c                    US:X  d  U(       d  U $ SU-
  nU R                   S   4SU R                  S-
  -  -   nU[        R                  " X@R                  U R
                  S9-   nUR                  5         U R                  U5      U-  nU$ )a*  
def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor:
    """
    Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).

    Comment by Ross Wightman: This is the same as the DropConnect impl I created for EfficientNet, etc networks,
    however, the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
    See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for changing the
    layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use 'survival rate' as the
    argument.
    """
    if drop_prob == 0.0 or not training:
        return input
    keep_prob = 1 - drop_prob
    shape = (input.shape[0],) + (1,) * (input.ndim - 1)  # work with diff dim tensors, not just 2D ConvNets
    random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
    random_tensor.floor_()  # binarize
    output = input.div(keep_prob) * random_tensor
    return output


class VJEPA2DropPath(nn.Module):
    """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks)."""

    def __init__(self, drop_prob: Optional[float] = None) -> None:
        super().__init__()
        self.drop_prob = drop_prob

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return drop_path(hidden_states, self.drop_prob, self.training)

    def extra_repr(self) -> str:
        return f"p={self.drop_prob}"


class VJEPA2MLP(nn.Module):
    def __init__(self, config: VJEPA2Config, hidden_size: int = 1024, mlp_ratio: float = 4.0):
        super().__init__()
        in_features = out_features = hidden_size
        hidden_features = int(hidden_size * mlp_ratio)
        self.fc1 = nn.Linear(in_features, hidden_features, bias=True)
        self.activation = ACT2FN[config.hidden_act]
        self.fc2 = nn.Linear(hidden_features, out_features, bias=True)

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        hidden_state = self.fc1(hidden_state)
        hidden_state = self.activation(hidden_state)
        hidden_state = self.fc2(hidden_state)
        return hidden_state


class VJEPA2Layer(GradientCheckpointingLayer):
    """This corresponds to the Block class in the original implementation."""

    def __init__(
        self,
        config: VJEPA2Config,
        drop_path_rate: float = 0.0,
        hidden_size: int = 1024,
        num_attention_heads: int = 16,
        mlp_ratio: float = 4.0,
    ):
        super().__init__()
        self.config = config
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.mlp_ratio = mlp_ratio

        self.norm1 = nn.LayerNorm(hidden_size, eps=config.layer_norm_eps)
        self.attention = VJEPA2RopeAttention(config, hidden_size, num_attention_heads)
        self.drop_path = VJEPA2DropPath(drop_path_rate) if config.drop_path_rate > 0.0 else nn.Identity()
        self.norm2 = nn.LayerNorm(hidden_size, eps=config.layer_norm_eps)
        self.mlp = VJEPA2MLP(config, hidden_size=hidden_size, mlp_ratio=mlp_ratio)

    def forward(
        self,
        hidden_states: torch.Tensor,
        position_mask: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> tuple[torch.Tensor, ...]:
        # Self-attention
        residual = hidden_states
        hidden_states = self.norm1(hidden_states)
        self_attention_outputs = self.attention(
            hidden_states,
            position_mask=position_mask,
            head_mask=head_mask,
            output_attentions=output_attentions,
        )
        attention_output = self_attention_outputs[0]
        hidden_states = self.drop_path(attention_output) + residual

        # MLP
        residual = hidden_states
        hidden_states = self.norm2(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = self.drop_path(hidden_states) + residual

        # Add self attentions if we output attention weights
        outputs = self_attention_outputs[1:]
        outputs = (hidden_states,) + outputs

        return outputs


class VJEPA2Encoder(nn.Module):
    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.config = config

        self.embeddings = VJEPA2Embeddings(config, hidden_size=config.hidden_size)
        drop_path_rates = [
            (config.drop_path_rate * i / (config.num_hidden_layers - 1) if config.num_hidden_layers > 1 else 0.0)
            for i in range(config.num_hidden_layers)
        ]
        self.layer = nn.ModuleList(
            [
                VJEPA2Layer(
                    config,
                    drop_path_rate=drop_path_rates[i],
                    hidden_size=config.hidden_size,
                    num_attention_heads=config.num_attention_heads,
                    mlp_ratio=config.mlp_ratio,
                )
                for i in range(config.num_hidden_layers)
            ]
        )
        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.gradient_checkpointing = False

    @can_return_tuple
    def forward(
        self,
        pixel_values_videos: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
    ) -> BaseModelOutput:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        hidden_states = self.embeddings(pixel_values_videos)

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_head_mask = head_mask[i] if head_mask is not None else None
            layer_outputs = layer_module(hidden_states, None, layer_head_mask, output_attentions)
            hidden_states = layer_outputs[0]
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        hidden_states = self.layernorm(hidden_states)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )


def apply_masks(tensor: torch.Tensor, masks: list[torch.Tensor]) -> torch.Tensor:
    """
    Args:
        tensor (`torch.Tensor`):
            Tensor of shape [batch_size, num_patches, feature_dim]
        masks (`List[torch.Tensor]`):
            List of tensors of shape [batch_size, num_patches] containing indices of patches to keep
    """
    all_masked_tensors = []
    for mask in masks:
        mask = mask.to(tensor.device)
        mask_keep = mask.unsqueeze(-1).repeat(1, 1, tensor.size(-1))
        all_masked_tensors += [torch.gather(tensor, dim=1, index=mask_keep)]

    return torch.cat(all_masked_tensors, dim=0)

class VJEPA2PredictorEmbeddings(nn.Module):
    """
    Construct mask token, position and patch embeddings.
    """

    def __init__(self, config: VJEPA2Config):
        super().__init__()

        self.config = config
        self.predictor_embeddings = nn.Linear(config.hidden_size, config.pred_hidden_size)
        self.zero_init_mask_tokens = config.pred_zero_init_mask_tokens
        self.num_mask_tokens = config.pred_num_mask_tokens
        self.mask_tokens = nn.Parameter(torch.zeros(self.num_mask_tokens, 1, 1, config.pred_hidden_size))

        self.patch_size = config.patch_size
        self.config = config

    @staticmethod
    def num_patches(config):
        if config.frames_per_clip > 1:
            return (
                (config.frames_per_clip // config.tubelet_size)
                * (config.crop_size // config.patch_size)
                * (config.crop_size // config.patch_size)
            )
        else:
            return (config.crop_size // config.patch_size) * (config.crop_size // config.patch_size)

    def forward(
        self,
        hidden_states: torch.Tensor,
        context_mask: list[torch.Tensor],
        target_mask: list[torch.Tensor],
        mask_index: int = 1,
    ) -> tuple[torch.Tensor, torch.Tensor]:
        """
        hidden_states : encoder outputs (context)
        context_mask: tokens of the context (outputs from the encoder)
        target_mask: tokens to predict
        mask_index: index of the target mask to choose (useful for multiclip?)
        """
        B = hidden_states.size(0)
        context = self.predictor_embeddings(hidden_states)

        # Make target tokens
        mask_index = mask_index % self.num_mask_tokens
        target = self.mask_tokens[mask_index]

        # Use the provided target mask to get the number of patches to predict
        max_patch_num = target_mask[0].max() + 1  # one extra to include the last patch
        target = target.repeat(B, max_patch_num, 1)
        target = apply_masks(target, target_mask)

        # Concatenate context & target tokens
        context = context.repeat(len(context_mask), 1, 1)
        embeddings = torch.cat([context, target], dim=1)

        # Positions of context & target tokens
        cm = torch.cat(context_mask, dim=0)
        tm = torch.cat(target_mask, dim=0)
        pos_ids = torch.cat([cm, tm], dim=1)

        return embeddings, pos_ids


class VJEPA2Predictor(nn.Module):
    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.config = config
        self.gradient_checkpointing = False
        self.embeddings = VJEPA2PredictorEmbeddings(config)
        drop_path_rates = [
            (
                config.drop_path_rate * i / (config.pred_num_hidden_layers - 1)
                if config.pred_num_hidden_layers > 1
                else 0.0
            )
            for i in range(config.pred_num_hidden_layers)
        ]
        self.layer = nn.ModuleList(
            [
                VJEPA2Layer(
                    config,
                    drop_path_rate=drop_path_rates[i],
                    hidden_size=config.pred_hidden_size,
                    num_attention_heads=config.pred_num_attention_heads,
                    mlp_ratio=config.pred_mlp_ratio,
                )
                for i in range(config.pred_num_hidden_layers)
            ]
        )
        self.layernorm = nn.LayerNorm(config.pred_hidden_size, eps=config.layer_norm_eps)
        self.proj = nn.Linear(config.pred_hidden_size, config.hidden_size, bias=True)

    def sort_tokens(self, hidden_states, position_masks, argsort, head_mask=None):
        # gather position masks
        position_masks = position_masks.to(argsort.device)
        position_masks = torch.gather(position_masks, dim=1, index=argsort)

        # gather hidden states
        argsort = argsort.to(hidden_states.device)
        hidden_states_argsort = argsort.unsqueeze(-1).expand(-1, -1, hidden_states.size(-1))
        hidden_states = torch.gather(hidden_states, dim=1, index=hidden_states_argsort)

        # gather head mask
        if head_mask is not None and head_mask[0] is not None:
            head_mask = head_mask.permute(1, 0, 2, 3, 4)
            argsort_4d = (
                argsort.unsqueeze(1)
                .unsqueeze(1)
                .expand(-1, head_mask.size(1), head_mask.size(2), -1)
                .unsqueeze(-1)
                .expand(-1, -1, -1, -1, head_mask.size(-1))
            )
            head_mask = torch.gather(head_mask, dim=3, index=argsort_4d)
            argsort_5d = (
                argsort.unsqueeze(1)
                .unsqueeze(1)
                .unsqueeze(1)
                .expand(-1, head_mask.size(1), head_mask.size(2), head_mask.size(3), -1)
            )
            head_mask = torch.gather(head_mask, dim=4, index=argsort_5d)
            head_mask = head_mask.permute(1, 0, 2, 3, 4)

        return hidden_states, position_masks, head_mask

    def unsort_tokens(self, hidden_states, argsort):
        argsort = argsort.to(hidden_states.device)
        reverse_argsort = torch.argsort(argsort, dim=1)
        reverse_argsort = reverse_argsort.unsqueeze(-1).expand(-1, -1, hidden_states.size(-1))
        hidden_states = torch.gather(hidden_states, dim=1, index=reverse_argsort)
        return hidden_states

    @can_return_tuple
    def forward(
        self,
        encoder_hidden_states: torch.Tensor,
        context_mask: list[torch.Tensor],
        target_mask: list[torch.Tensor],
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
    ) -> BaseModelOutput:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        # mask out the encoder hidden states
        encoder_hidden_states = apply_masks(encoder_hidden_states, context_mask)
        _, N_ctxt, D = encoder_hidden_states.shape
        hidden_states, position_masks = self.embeddings(encoder_hidden_states, context_mask, target_mask)

        # put tokens in sorted order
        argsort = torch.argsort(position_masks, dim=1)  # [B, N]
        hidden_states, position_masks, head_mask = self.sort_tokens(hidden_states, position_masks, argsort, head_mask)

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)
            layer_head_mask = head_mask[i] if head_mask is not None else None
            layer_outputs = layer_module(hidden_states, position_masks, layer_head_mask, output_attentions)
            hidden_states = layer_outputs[0]
            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        hidden_states = self.layernorm(hidden_states)
        # unsort and extract the predicted tokens
        hidden_states = self.unsort_tokens(hidden_states, argsort)
        hidden_states = hidden_states[:, N_ctxt:]
        # projection
        hidden_states = self.proj(hidden_states)

        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )


class VJEPA2PoolerSelfAttention(nn.Module):
    """Multi-headed attention from 'Attention Is All You Need' paper"""

    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.embed_dim // self.num_heads
        if self.head_dim * self.num_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
                f" {self.num_heads})."
            )
        self.scale = self.head_dim**-0.5
        self.dropout = config.attention_dropout
        self.is_causal = False

        self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.out_proj = nn.Linear(self.embed_dim, self.embed_dim)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        """Input shape: Batch x Time x Channel"""
        batch_size, seq_length, embed_dim = hidden_states.shape

        queries = self.q_proj(hidden_states)
        keys = self.k_proj(hidden_states)
        values = self.v_proj(hidden_states)

        queries = queries.view(batch_size, seq_length, self.num_heads, self.head_dim).transpose(1, 2)
        keys = keys.view(batch_size, seq_length, self.num_heads, self.head_dim).transpose(1, 2)
        values = values.view(batch_size, seq_length, self.num_heads, self.head_dim).transpose(1, 2)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            queries,
            keys,
            values,
            attention_mask,
            is_causal=self.is_causal,
            scaling=self.scale,
            dropout=0.0 if not self.training else self.dropout,
        )

        attn_output = attn_output.reshape(batch_size, seq_length, embed_dim).contiguous()
        attn_output = self.out_proj(attn_output)

        if not output_attentions:
            attn_weights = None

        return attn_output, attn_weights


class VJEPA2PoolerCrossAttention(nn.Module):
    """It's different from other cross-attention layers, doesn't have output projection layer (o_proj)"""

    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.config = config
        self.embed_dim = config.hidden_size
        self.num_heads = config.num_attention_heads
        self.head_dim = self.embed_dim // self.num_heads
        if self.head_dim * self.num_heads != self.embed_dim:
            raise ValueError(
                f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`:"
                f" {self.num_heads})."
            )
        self.scale = self.head_dim**-0.5
        self.dropout = config.attention_dropout
        self.is_causal = False

        self.k_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.v_proj = nn.Linear(self.embed_dim, self.embed_dim)
        self.q_proj = nn.Linear(self.embed_dim, self.embed_dim)

    def forward(
        self,
        queries: torch.Tensor,
        keys: torch.Tensor,
        values: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = False,
    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
        """Input shape: Batch x Time x Channel"""
        batch_size, q_seq_length, embed_dim = queries.shape
        kv_seq_length = keys.shape[1]

        queries = self.q_proj(queries)
        keys = self.k_proj(keys)
        values = self.v_proj(values)

        queries = queries.view(batch_size, q_seq_length, self.num_heads, self.head_dim).transpose(1, 2)
        keys = keys.view(batch_size, kv_seq_length, self.num_heads, self.head_dim).transpose(1, 2)
        values = values.view(batch_size, kv_seq_length, self.num_heads, self.head_dim).transpose(1, 2)

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        attn_output, attn_weights = attention_interface(
            self,
            queries,
            keys,
            values,
            attention_mask,
            is_causal=self.is_causal,
            scaling=self.scale,
            dropout=0.0 if not self.training else self.dropout,
        )

        attn_output = attn_output.reshape(batch_size, q_seq_length, embed_dim).contiguous()

        if not output_attentions:
            attn_weights = None

        return attn_output, attn_weights

class VJEPA2PoolerSelfAttentionLayer(GradientCheckpointingLayer):
    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.layer_norm1 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.self_attn = VJEPA2PoolerSelfAttention(config)
        self.layer_norm2 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.mlp = VJEPA2MLP(config, hidden_size=config.hidden_size)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: torch.Tensor,
        output_attentions: Optional[bool] = False,
    ) -> tuple[torch.Tensor, ...]:
        """
        Args:
            hidden_states (`torch.FloatTensor`):
                Input to the layer of shape `(batch, seq_len, embed_dim)`.
            attention_mask (`torch.FloatTensor`):
                Attention mask of shape `(batch, 1, q_len, k_v_seq_len)` where padding elements are indicated by very large negative values.
            output_attentions (`bool`, *optional*, defaults to `False`):
                Whether or not to return the attentions tensors of all attention layers. See `attentions` under
                returned tensors for more detail.
        """
        residual = hidden_states

        hidden_states = self.layer_norm1(hidden_states)
        hidden_states, attn_weights = self.self_attn(
            hidden_states=hidden_states,
            attention_mask=attention_mask,
            output_attentions=output_attentions,
        )
        hidden_states = residual + hidden_states

        residual = hidden_states
        hidden_states = self.layer_norm2(hidden_states)
        hidden_states = self.mlp(hidden_states)
        hidden_states = residual + hidden_states

        outputs = (hidden_states,)
        if output_attentions:
            outputs += (attn_weights,)

        return outputs


class VJEPA2PoolerCrossAttentionLayer(GradientCheckpointingLayer):
    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.layer_norm1 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.cross_attn = VJEPA2PoolerCrossAttention(config)
        self.layer_norm2 = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.mlp = VJEPA2MLP(config, hidden_size=config.hidden_size)

    def forward(
        self,
        queries: torch.Tensor,
        hidden_state: torch.Tensor,
        attention_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> tuple[torch.Tensor, ...]:
        # Apply cross-attention
        residual = queries
        hidden_state = self.layer_norm1(hidden_state)
        hidden_state, *attn_weights = self.cross_attn(
            queries,
            hidden_state,
            hidden_state,
            attention_mask=attention_mask,
            output_attentions=output_attentions,
        )
        hidden_state = residual + hidden_state

        # Apply MLP
        residual = hidden_state
        hidden_state = self.layer_norm2(hidden_state)
        hidden_state = self.mlp(hidden_state)
        hidden_state = residual + hidden_state

        outputs = (hidden_state,)
        if output_attentions:
            outputs += tuple(attn_weights)

        return outputs


class VJEPA2AttentivePooler(nn.Module):
    """Attentive Pooler"""

    def __init__(self, config: VJEPA2Config):
        super().__init__()
        self.query_tokens = nn.Parameter(torch.zeros(1, 1, config.hidden_size))
        self.cross_attention_layer = VJEPA2PoolerCrossAttentionLayer(config)
        self.self_attention_layers = nn.ModuleList(
            [VJEPA2PoolerSelfAttentionLayer(config) for _ in range(config.num_pooler_layers)]
        )

    def forward(self, hidden_state: torch.Tensor) -> torch.Tensor:
        for layer in self.self_attention_layers:
            hidden_state = layer(hidden_state, attention_mask=None)[0]
        queries = self.query_tokens.repeat(hidden_state.shape[0], 1, 1)
        hidden_state = self.cross_attention_layer(queries, hidden_state)[0]
        return hidden_state.squeeze(1)


@auto_docstring
class VJEPA2PreTrainedModel(PreTrainedModel):
    config: VJEPA2Config
    base_model_prefix = "vjepa2"
    main_input_name = "pixel_values_videos"
    supports_gradient_checkpointing = True
    _no_split_modules = [
        "VJEPA2Layer",
        "VJEPA2PoolerSelfAttentionLayer",
        "VJEPA2PoolerCrossAttentionLayer",
        "VJEPA2PredictorEmbeddings",
    ]
    _supports_sdpa = True
    _supports_flash_attn = True

    def _init_weights(self, module):
        """Initialize the weights"""
        init_std = self.config.initializer_range

        # Upcast to fp32 before truncated-normal init, then cast back to the parameter dtype.
        def trunc_normal_f32_(weight, std):
            data_float_32 = weight.data.to(torch.float32)
            data_init = nn.init.trunc_normal_(data_float_32, mean=0.0, std=std)
            weight.data = data_init.to(weight.dtype)

        if isinstance(module, VJEPA2AttentivePooler):
            trunc_normal_f32_(module.query_tokens, std=init_std)
            for i, layer in enumerate(module.self_attention_layers, 1):
                std = init_std / (i**0.5)
                trunc_normal_f32_(layer.self_attn.out_proj.weight, std=std)
                trunc_normal_f32_(layer.mlp.fc2.weight, std=std)
            std = init_std / (len(module.self_attention_layers) + 1) ** 0.5
            trunc_normal_f32_(module.cross_attention_layer.mlp.fc2.weight, std=std)
        elif isinstance(module, VJEPA2PredictorEmbeddings):
            if module.zero_init_mask_tokens:
                module.mask_tokens.data.zero_()
            else:
                trunc_normal_f32_(module.mask_tokens, std=init_std)
        elif isinstance(module, (nn.Linear, nn.Conv2d, nn.Conv3d)):
            trunc_normal_f32_(module.weight, std=init_std)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

def _convert_head_mask_to_5d(head_mask, num_hidden_layers):
    """
    Inputs:
        - head_mask: bsz x seq_length x seq_length | None
    Returns
        - [num_hidden_layers x batch x num_heads x seq_length x seq_length] | [num_hidden_layers]
    """
    if head_mask is not None:
        head_mask = head_mask.unsqueeze(1).unsqueeze(0)
        head_mask = head_mask.expand(num_hidden_layers, -1, -1, -1, -1)
    else:
        head_mask = [None] * num_hidden_layers
    return head_mask

@auto_docstring
class VJEPA2Model(VJEPA2PreTrainedModel):
    def __init__(self, config: VJEPA2Config):
        super().__init__(config)
        self.config = config

        self.encoder = VJEPA2Encoder(config)
        self.predictor = VJEPA2Predictor(config)

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> VJEPA2PatchEmbeddings3D:
        return self.encoder.embeddings.patch_embeddings

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        pixel_values_videos: torch.Tensor,
        context_head_mask: Optional[torch.Tensor] = None,
        context_mask: Optional[list[torch.Tensor]] = None,
        target_head_mask: Optional[torch.Tensor] = None,
        target_mask: Optional[list[torch.Tensor]] = None,
        skip_predictor: bool = False,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
    ) -> VJEPA2WithMaskedInputModelOutput:
        r"""
        context_head_mask (`torch.Tensor` with shape `[num_heads]` or `[num_hidden_layers x num_heads]`, *optional*):
            The mask indicating if we should keep the heads or not (1.0 for keep, 0.0 for discard) for the context.
        context_mask (`torch.Tensor` with shape `[batch_size, patch_size, 1]`, *optional*):
            The mask position ids indicating which encoder output patches are going to be exposed to the predictor.
            By default, this mask is created as torch.arange(N).unsqueeze(0).repeat(B,1), indicating full context
            available to the predictor.
        target_head_mask (`torch.Tensor` with shape `[num_heads]` or `[num_hidden_layers x num_heads]`, *optional*):
            The mask indicating if we should keep the heads or not (1.0 for keep, 0.0 for discard) for the target.
        target_mask (`torch.Tensor` with shape `[batch_size, patch_size, 1]`, *optional*):
            The mask position ids indicating which encoder output patches are going to be used as a prediction target
            for the predictor. By default, this mask is created as torch.arange(N).unsqueeze(0).repeat(B,1), indicating
            that the predictor should predict all encoder patches.
        skip_predictor (bool):
            flag to skip the predictor forward, useful if you just need the encoder outputs
        """
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )

        if pixel_values_videos is None:
            raise ValueError("You have to specify pixel_values_videos")

        # Prepare head masks if needed
        context_head_mask = _convert_head_mask_to_5d(context_head_mask, self.config.num_hidden_layers)
        target_head_mask = _convert_head_mask_to_5d(target_head_mask, self.config.pred_num_hidden_layers)

        encoder_outputs: BaseModelOutput = self.encoder(
            pixel_values_videos=pixel_values_videos,
            head_mask=context_head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
        )
        sequence_output = encoder_outputs.last_hidden_state

        if context_mask is None and target_mask is None:
            B = pixel_values_videos.size(0)
            N = sequence_output.size(1)  # ensure we are using dynamic patch count
            context_mask = [torch.arange(N, device=pixel_values_videos.device).unsqueeze(0).repeat((B, 1))]
            target_mask = [torch.arange(N, device=pixel_values_videos.device).unsqueeze(0).repeat((B, 1))]

        if not skip_predictor:
            predictor_outputs: BaseModelOutput = self.predictor(
                encoder_hidden_states=sequence_output,
                context_mask=context_mask,
                target_mask=target_mask,
                head_mask=target_head_mask,
                output_attentions=output_attentions,
                output_hidden_states=output_hidden_states,
            )
            predictor_output = VJEPA2WithMaskedInputPredictorOutput(
                last_hidden_state=predictor_outputs.last_hidden_state,
                target_hidden_state=apply_masks(sequence_output, target_mask),
                hidden_states=predictor_outputs.hidden_states,
                attentions=predictor_outputs.attentions,
            )
        else:
            predictor_output = None

        encoder_output = VJEPA2WithMaskedInputModelOutput(
            last_hidden_state=sequence_output,
            masked_hidden_state=apply_masks(sequence_output, context_mask),
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            predictor_output=predictor_output,
        )

        return encoder_output

    def get_vision_features(self, pixel_values_videos) -> torch.Tensor:
        encoder_output = self.forward(pixel_values_videos)
        return encoder_output.last_hidden_state

@auto_docstring(
    custom_intro="""
    V-JEPA 2 Model transformer with a video classification head on top (a linear layer on top of the attentive pooler).
    """
)
class VJEPA2ForVideoClassification(VJEPA2PreTrainedModel):
    def __init__(self, config: VJEPA2Config):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.vjepa2 = VJEPA2Model(config)

        # Classifier head
        self.pooler = VJEPA2AttentivePooler(config)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels, bias=True)

        # Initialize weights and apply final processing
        self.post_init()

    @can_return_tuple
    @auto_docstring
    def forward(
        self,
        pixel_values_videos: torch.Tensor,
        labels: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
    ) -> Union[tuple, ImageClassifierOutput]:
        r"""
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the image classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).

        Examples:

        ```python
        >>> import torch
        >>> import numpy as np
        >>> from transformers import AutoVideoProcessor, VJEPA2ForVideoClassification

        >>> device = "cuda"

        >>> video_processor = AutoVideoProcessor.from_pretrained("facebook/vjepa2-vitl-fpc16-256-ssv2")
        >>> model = VJEPA2ForVideoClassification.from_pretrained("facebook/vjepa2-vitl-fpc16-256-ssv2").to(device)

        >>> video = np.ones((64, 256, 256, 3))  # 64 frames, 256x256 RGB
        >>> inputs = video_processor(video, return_tensors="pt").to(device)

        >>> # For inference
        >>> with torch.no_grad():
        ...     outputs = model(**inputs)
        >>> logits = outputs.logits

        >>> predicted_label = logits.argmax(-1).item()
        >>> print(model.config.id2label[predicted_label])

        >>> # For training
        >>> labels = torch.ones(1, dtype=torch.long, device=device)
        >>> loss = model(**inputs, labels=labels).loss

        ```"""
        outputs = self.vjepa2(
            pixel_values_videos=pixel_values_videos,
            skip_predictor=True,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
        )

        last_hidden_state = outputs.last_hidden_state
        pooler_output = self.pooler(last_hidden_state)
        logits = self.classifier(pooler_output)

        loss = None
        if labels is not None:
            loss = self.loss_function(pooled_logits=logits, labels=labels, config=self.config)

        return ImageClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["VJEPA2Model", "VJEPA2ForVideoClassification", "VJEPA2PreTrainedModel"]