"""PyTorch YOLOS model."""

import collections.abc
from dataclasses import dataclass
from typing import Callable, Optional, Union

import torch
import torch.utils.checkpoint
from torch import nn

from ...activations import ACT2FN
from ...modeling_layers import GradientCheckpointingLayer
from ...modeling_outputs import BaseModelOutput, BaseModelOutputWithPooling
from ...modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
from ...pytorch_utils import find_pruneable_heads_and_indices, prune_linear_layer
from ...utils import ModelOutput, auto_docstring, logging
from .configuration_yolos import YolosConfig


logger = logging.get_logger(__name__)


@dataclass
@auto_docstring(
    custom_intro="""
    Output type of [`YolosForObjectDetection`].
    """
)
class YolosObjectDetectionOutput(ModelOutput):
    r"""
    loss (`torch.FloatTensor` of shape `(1,)`, *optional*, returned when `labels` are provided):
        Total loss as a linear combination of a negative log-likelihood (cross-entropy) for class prediction and a
        bounding box loss. The latter is defined as a linear combination of the L1 loss and the generalized
        scale-invariant IoU loss.
    loss_dict (`Dict`, *optional*):
        A dictionary containing the individual losses. Useful for logging.
    logits (`torch.FloatTensor` of shape `(batch_size, num_queries, num_classes + 1)`):
        Classification logits (including no-object) for all queries.
    pred_boxes (`torch.FloatTensor` of shape `(batch_size, num_queries, 4)`):
        Normalized box coordinates for all queries, represented as (center_x, center_y, width, height). These
        values are normalized in [0, 1], relative to the size of each individual image in the batch (disregarding
        possible padding). You can use [`~YolosImageProcessor.post_process`] to retrieve the unnormalized bounding
        boxes.
    auxiliary_outputs (`list[Dict]`, *optional*):
        Optional, only returned when auxiliary losses are activated (i.e. `config.auxiliary_loss` is set to `True`)
        and labels are provided. It is a list of dictionaries containing the two above keys (`logits` and
        `pred_boxes`) for each decoder layer.
    last_hidden_state (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
        Sequence of hidden-states at the output of the last layer of the decoder of the model.
    """

    loss: Optional[torch.FloatTensor] = None
    loss_dict: Optional[dict] = None
    logits: Optional[torch.FloatTensor] = None
    pred_boxes: Optional[torch.FloatTensor] = None
    auxiliary_outputs: Optional[list[dict]] = None
    last_hidden_state: Optional[torch.FloatTensor] = None
    hidden_states: Optional[tuple[torch.FloatTensor]] = None
    attentions: Optional[tuple[torch.FloatTensor]] = None

class YolosEmbeddings(nn.Module):
    """
    Construct the CLS token, detection tokens, position and patch embeddings.

    """

    def __init__(self, config: YolosConfig) -> None:
        super().__init__()

        self.cls_token = nn.Parameter(torch.zeros(1, 1, config.hidden_size))
        self.detection_tokens = nn.Parameter(torch.zeros(1, config.num_detection_tokens, config.hidden_size))
        self.patch_embeddings = YolosPatchEmbeddings(config)
        num_patches = self.patch_embeddings.num_patches
        self.position_embeddings = nn.Parameter(
            torch.zeros(1, num_patches + config.num_detection_tokens + 1, config.hidden_size)
        )

        self.dropout = nn.Dropout(config.hidden_dropout_prob)
        self.interpolation = InterpolateInitialPositionEmbeddings(config)
        self.config = config

    def forward(self, pixel_values: torch.Tensor) -> torch.Tensor:
        batch_size, num_channels, height, width = pixel_values.shape
        embeddings = self.patch_embeddings(pixel_values)

        batch_size, seq_len, _ = embeddings.size()

        # add the [CLS] and detection tokens to the embedded patch tokens
        cls_tokens = self.cls_token.expand(batch_size, -1, -1)
        detection_tokens = self.detection_tokens.expand(batch_size, -1, -1)
        embeddings = torch.cat((cls_tokens, embeddings, detection_tokens), dim=1)

        # add positional encoding to each token
        # this might require interpolation of the existing position embeddings
        position_embeddings = self.interpolation(self.position_embeddings, (height, width))

        embeddings = embeddings + position_embeddings

        embeddings = self.dropout(embeddings)

        return embeddings


class InterpolateInitialPositionEmbeddings(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        self.config = config

    def forward(self, pos_embed, img_size=(800, 1344)) -> torch.Tensor:
        cls_pos_embed = pos_embed[:, 0, :]
        cls_pos_embed = cls_pos_embed[:, None]
        det_pos_embed = pos_embed[:, -self.config.num_detection_tokens :, :]
        patch_pos_embed = pos_embed[:, 1 : -self.config.num_detection_tokens, :]
        patch_pos_embed = patch_pos_embed.transpose(1, 2)
        batch_size, hidden_size, seq_len = patch_pos_embed.shape

        patch_height, patch_width = (
            self.config.image_size[0] // self.config.patch_size,
            self.config.image_size[1] // self.config.patch_size,
        )
        patch_pos_embed = patch_pos_embed.view(batch_size, hidden_size, patch_height, patch_width)

        height, width = img_size
        new_patch_height, new_patch_width = height // self.config.patch_size, width // self.config.patch_size
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed, size=(new_patch_height, new_patch_width), mode="bicubic", align_corners=False
        )
        patch_pos_embed = patch_pos_embed.flatten(2).transpose(1, 2)
        scale_pos_embed = torch.cat((cls_pos_embed, patch_pos_embed, det_pos_embed), dim=1)
        return scale_pos_embed


class InterpolateMidPositionEmbeddings(nn.Module):
    def __init__(self, config) -> None:
        super().__init__()
        self.config = config

    def forward(self, pos_embed, img_size=(800, 1344)) -> torch.Tensor:
        cls_pos_embed = pos_embed[:, :, 0, :]
        cls_pos_embed = cls_pos_embed[:, None]
        det_pos_embed = pos_embed[:, :, -self.config.num_detection_tokens :, :]
        patch_pos_embed = pos_embed[:, :, 1 : -self.config.num_detection_tokens, :]
        patch_pos_embed = patch_pos_embed.transpose(2, 3)
        depth, batch_size, hidden_size, seq_len = patch_pos_embed.shape

        patch_height, patch_width = (
            self.config.image_size[0] // self.config.patch_size,
            self.config.image_size[1] // self.config.patch_size,
        )
        patch_pos_embed = patch_pos_embed.view(depth * batch_size, hidden_size, patch_height, patch_width)
        height, width = img_size
        new_patch_height, new_patch_width = height // self.config.patch_size, width // self.config.patch_size
        patch_pos_embed = nn.functional.interpolate(
            patch_pos_embed, size=(new_patch_height, new_patch_width), mode="bicubic", align_corners=False
        )
        patch_pos_embed = (
            patch_pos_embed.flatten(2)
            .transpose(1, 2)
            .contiguous()
            .view(depth, batch_size, new_patch_height * new_patch_width, hidden_size)
        )
        scale_pos_embed = torch.cat((cls_pos_embed, patch_pos_embed, det_pos_embed), dim=2)
        return scale_pos_embed


class YolosPatchEmbeddings(nn.Module):
    """
    This class turns `pixel_values` of shape `(batch_size, num_channels, height, width)` into the initial
    `hidden_states` (patch embeddings) of shape `(batch_size, seq_length, hidden_size)` to be consumed by a
    Transformer.
    """

    def __init__(self, config):
        super().__init__()
        image_size, patch_size = config.image_size, config.patch_size
        num_channels, hidden_size = config.num_channels, config.hidden_size

        image_size = image_size if isinstance(image_size, collections.abc.Iterable) else (image_size, image_size)
        patch_size = patch_size if isinstance(patch_size, collections.abc.Iterable) else (patch_size, patch_size)
        num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.num_patches = num_patches

        self.projection = nn.Conv2d(num_channels, hidden_size, kernel_size=patch_size, stride=patch_size)

    def forward(self, pixel_values: torch.Tensor) -> torch.Tensor:
        batch_size, num_channels, height, width = pixel_values.shape
        if num_channels != self.num_channels:
            raise ValueError(
                "Make sure that the channel dimension of the pixel values match with the one set in the configuration."
            )

        embeddings = self.projection(pixel_values).flatten(2).transpose(1, 2)
        return embeddings


def eager_attention_forward(
    module: nn.Module,
    query: torch.Tensor,
    key: torch.Tensor,
    value: torch.Tensor,
    attention_mask: Optional[torch.Tensor],
    scaling: float,
    dropout: float = 0.0,
    **kwargs,
):
    # Take the dot product between "query" and "key" to get the raw attention scores.
    attn_weights = torch.matmul(query, key.transpose(-1, -2)) * scaling

    # Normalize the attention scores to probabilities.
    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)

    # This is actually dropping out entire tokens to attend to, which might
    # seem a bit unusual, but is taken from the original Transformer paper.
    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)

    # Mask heads if we want to
    if attention_mask is not None:
        attn_weights = attn_weights * attention_mask

    attn_output = torch.matmul(attn_weights, value)
    attn_output = attn_output.transpose(1, 2).contiguous()

    return attn_output, attn_weights


class YolosSelfAttention(nn.Module):
    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size {config.hidden_size} is not a multiple of the number of attention "
                f"heads {config.num_attention_heads}."
            )

        self.config = config
        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size
        self.dropout_prob = config.attention_probs_dropout_prob
        self.scaling = self.attention_head_size**-0.5
        self.is_causal = False

        self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)
        self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.qkv_bias)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        batch_size, seq_length, _ = hidden_states.shape
        query_layer = (
            self.query(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )
        key_layer = (
            self.key(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )
        value_layer = (
            self.value(hidden_states)
            .view(batch_size, -1, self.num_attention_heads, self.attention_head_size)
            .transpose(1, 2)
        )

        attention_interface: Callable = eager_attention_forward
        if self.config._attn_implementation != "eager":
            if self.config._attn_implementation == "sdpa" and output_attentions:
                logger.warning_once(
                    "`torch.nn.functional.scaled_dot_product_attention` does not support `output_attentions=True`."
                    " Falling back to eager attention. This warning can be removed using the argument"
                    ' `attn_implementation="eager"` when loading the model.'
                )
            else:
                attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]

        context_layer, attention_probs = attention_interface(
            self,
            query_layer,
            key_layer,
            value_layer,
            head_mask,
            is_causal=self.is_causal,
            scaling=self.scaling,
            dropout=0.0 if not self.training else self.dropout_prob,
        )

        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.reshape(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        return outputs


class YolosSelfOutput(nn.Module):
    """
    The residual connection is defined in YolosLayer instead of here (as is the case with other models), due to the
    layernorm applied before each block.
    """

    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)

        return hidden_states


class YolosAttention(nn.Module):
    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.attention = YolosSelfAttention(config)
        self.output = YolosSelfOutput(config)
        self.pruned_heads = set()

    def prune_heads(self, heads: set[int]) -> None:
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.attention.num_attention_heads, self.attention.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.attention.query = prune_linear_layer(self.attention.query, index)
        self.attention.key = prune_linear_layer(self.attention.key, index)
        self.attention.value = prune_linear_layer(self.attention.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.attention.num_attention_heads = self.attention.num_attention_heads - len(heads)
        self.attention.all_head_size = self.attention.attention_head_size * self.attention.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        self_outputs = self.attention(hidden_states, head_mask, output_attentions)

        attention_output = self.output(self_outputs[0], hidden_states)

        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs


class YolosIntermediate(nn.Module):
    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        if isinstance(config.hidden_act, str):
            self.intermediate_act_fn = ACT2FN[config.hidden_act]
        else:
            self.intermediate_act_fn = config.hidden_act

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.intermediate_act_fn(hidden_states)

        return hidden_states


class YolosOutput(nn.Module):
    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
        self.dropout = nn.Dropout(config.hidden_dropout_prob)

    def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dense(hidden_states)
        hidden_states = self.dropout(hidden_states)

        hidden_states = hidden_states + input_tensor

        return hidden_states


class YolosLayer(GradientCheckpointingLayer):
    """This corresponds to the Block class in the timm implementation."""

    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.chunk_size_feed_forward = config.chunk_size_feed_forward
        self.seq_len_dim = 1
        self.attention = YolosAttention(config)
        self.intermediate = YolosIntermediate(config)
        self.output = YolosOutput(config)
        self.layernorm_before = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.layernorm_after = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)

    def forward(
        self,
        hidden_states: torch.Tensor,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
    ) -> Union[tuple[torch.Tensor, torch.Tensor], tuple[torch.Tensor]]:
        self_attention_outputs = self.attention(
            self.layernorm_before(hidden_states),  # in Yolos, layernorm is applied before self-attention
            head_mask,
            output_attentions=output_attentions,
        )
        attention_output = self_attention_outputs[0]
        outputs = self_attention_outputs[1:]  # add self attentions if we output attention weights

        # first residual connection
        hidden_states = attention_output + hidden_states

        # in Yolos, layernorm is also applied after self-attention
        layer_output = self.layernorm_after(hidden_states)
        layer_output = self.intermediate(layer_output)

        # second residual connection is done here
        layer_output = self.output(layer_output, hidden_states)

        outputs = (layer_output,) + outputs

        return outputs


class YolosEncoder(nn.Module):
    def __init__(self, config: YolosConfig) -> None:
        super().__init__()
        self.config = config
        self.layer = nn.ModuleList([YolosLayer(config) for _ in range(config.num_hidden_layers)])
        self.gradient_checkpointing = False

        seq_length = (
            1 + (config.image_size[0] * config.image_size[1] // config.patch_size**2) + config.num_detection_tokens
        )
        self.mid_position_embeddings = (
            nn.Parameter(
                torch.zeros(
                    config.num_hidden_layers - 1,
                    1,
                    seq_length,
                    config.hidden_size,
                )
            )
            if config.use_mid_position_embeddings
            else None
        )

        self.interpolation = InterpolateMidPositionEmbeddings(config) if config.use_mid_position_embeddings else None

    def forward(
        self,
        hidden_states: torch.Tensor,
        height: int,
        width: int,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: bool = False,
        output_hidden_states: bool = False,
        return_dict: bool = True,
    ) -> Union[tuple, BaseModelOutput]:
        all_hidden_states = () if output_hidden_states else None
        all_self_attentions = () if output_attentions else None

        if self.config.use_mid_position_embeddings:
            interpolated_mid_position_embeddings = self.interpolation(self.mid_position_embeddings, (height, width))

        for i, layer_module in enumerate(self.layer):
            if output_hidden_states:
                all_hidden_states = all_hidden_states + (hidden_states,)

            layer_head_mask = head_mask[i] if head_mask is not None else None

            layer_outputs = layer_module(hidden_states, layer_head_mask, output_attentions)

            hidden_states = layer_outputs[0]

            if self.config.use_mid_position_embeddings:
                if i < (self.config.num_hidden_layers - 1):
                    hidden_states = hidden_states + interpolated_mid_position_embeddings[i]

            if output_attentions:
                all_self_attentions = all_self_attentions + (layer_outputs[1],)

        if output_hidden_states:
            all_hidden_states = all_hidden_states + (hidden_states,)

        if not return_dict:
            return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)
        return BaseModelOutput(
            last_hidden_state=hidden_states,
            hidden_states=all_hidden_states,
            attentions=all_self_attentions,
        )


@auto_docstring
class YolosPreTrainedModel(PreTrainedModel):
    config: YolosConfig
    base_model_prefix = "vit"
    main_input_name = "pixel_values"
    supports_gradient_checkpointing = True
    _no_split_modules = []
    _supports_sdpa = True
    _supports_flash_attn = True
    _supports_flex_attn = True
    _supports_attention_backend = True

    def _init_weights(self, module: Union[nn.Linear, nn.Conv2d, nn.LayerNorm]) -> None:
        """Initialize the weights"""
        if isinstance(module, (nn.Linear, nn.Conv2d)):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)


@auto_docstring
class YolosModel(YolosPreTrainedModel):
    def __init__(self, config: YolosConfig, add_pooling_layer: bool = True):
        r"""
        add_pooling_layer (bool, *optional*, defaults to `True`):
            Whether to add a pooling layer
        """
        super().__init__(config)
        self.config = config

        self.embeddings = YolosEmbeddings(config)
        self.encoder = YolosEncoder(config)

        self.layernorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
        self.pooler = YolosPooler(config) if add_pooling_layer else None

        # Initialize weights and apply final processing
        self.post_init()

    def get_input_embeddings(self) -> YolosPatchEmbeddings:
        return self.embeddings.patch_embeddings

    def _prune_heads(self, heads_to_prune: dict[int, list[int]]) -> None:
        """
        Prunes heads of the model.

        Args:
            heads_to_prune (`dict`):
                See base class `PreTrainedModel`. The input dictionary must have the following format: {layer_num:
                list of heads to prune in this layer}
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)

    @auto_docstring
    def forward(
        self,
        pixel_values: Optional[torch.Tensor] = None,
        head_mask: Optional[torch.Tensor] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, BaseModelOutputWithPooling]:
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        if pixel_values is None:
            raise ValueError("You have to specify pixel_values")

        # Prepare head mask if needed
        # 1.0 in head_mask indicates we keep the head
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)

        embedding_output = self.embeddings(pixel_values)

        encoder_outputs = self.encoder(
            embedding_output,
            height=pixel_values.shape[-2],
            width=pixel_values.shape[-1],
            head_mask=head_mask,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = encoder_outputs[0]
        sequence_output = self.layernorm(sequence_output)
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None

        if not return_dict:
            head_outputs = (sequence_output, pooled_output) if pooled_output is not None else (sequence_output,)
            return head_outputs + encoder_outputs[1:]

        return BaseModelOutputWithPooling(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
        )


class YolosPooler(nn.Module):
    def __init__(self, config: YolosConfig):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.hidden_size)
        self.activation = nn.Tanh()

    def forward(self, hidden_states):
        # We "pool" the model by simply taking the hidden state corresponding
        # to the first token.
        first_token_tensor = hidden_states[:, 0]
        pooled_output = self.dense(first_token_tensor)
        pooled_output = self.activation(pooled_output)
        return pooled_output


class YolosMLPPredictionHead(nn.Module):
    """
    Very simple multi-layer perceptron (MLP, also called FFN), used to predict the normalized center coordinates,
    height and width of a bounding box w.r.t. an image.

    Copied from https://github.com/facebookresearch/detr/blob/master/models/detr.py

    """

    def __init__(self, input_dim, hidden_dim, output_dim, num_layers):
        super().__init__()
        self.num_layers = num_layers
        h = [hidden_dim] * (num_layers - 1)
        self.layers = nn.ModuleList(nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]))

    def forward(self, x):
        for i, layer in enumerate(self.layers):
            x = nn.functional.relu(layer(x)) if i < self.num_layers - 1 else layer(x)
        return x


@auto_docstring(
    custom_intro="""
    YOLOS Model (consisting of a ViT encoder) with object detection heads on top, for tasks such as COCO detection.
    """
)
class YolosForObjectDetection(YolosPreTrainedModel):
    def __init__(self, config: YolosConfig):
        super().__init__(config)

        # YOLOS (ViT) encoder model
        self.vit = YolosModel(config, add_pooling_layer=False)

        # Object detection heads
        # We add one class for the "no object" case
        self.class_labels_classifier = YolosMLPPredictionHead(
            input_dim=config.hidden_size, hidden_dim=config.hidden_size, output_dim=config.num_labels + 1, num_layers=3
        )
        self.bbox_predictor = YolosMLPPredictionHead(
            input_dim=config.hidden_size, hidden_dim=config.hidden_size, output_dim=4, num_layers=3
        )

        # Initialize weights and apply final processing
        self.post_init()

    # taken from https://github.com/facebookresearch/detr/blob/master/models/detr.py
    @torch.jit.unused
    def _set_aux_loss(self, outputs_class, outputs_coord):
        # this is a workaround to make torchscript happy, as torchscript
        # doesn't support dictionaries with non-homogeneous values, such
        # as a dict having both a Tensor and a list
        return [{"logits": a, "pred_boxes": b} for a, b in zip(outputs_class[:-1], outputs_coord[:-1])]

    @auto_docstring
    def forward(
        self,
        pixel_values: torch.FloatTensor,
        labels: Optional[list[dict]] = None,
        output_attentions: Optional[bool] = None,
        output_hidden_states: Optional[bool] = None,
        return_dict: Optional[bool] = None,
    ) -> Union[tuple, YolosObjectDetectionOutput]:
        r"""
        labels (`list[Dict]` of len `(batch_size,)`, *optional*):
            Labels for computing the bipartite matching loss. List of dicts, each dictionary containing at least the
            following 2 keys: `'class_labels'` and `'boxes'` (the class labels and bounding boxes of an image in the
            batch respectively). The class labels themselves should be a `torch.LongTensor` of len `(number of bounding
            boxes in the image,)` and the boxes a `torch.FloatTensor` of shape `(number of bounding boxes in the image,
            4)`.

        Examples:

        ```python
        >>> from transformers import AutoImageProcessor, AutoModelForObjectDetection
        >>> import torch
        >>> from PIL import Image
        >>> import requests

        >>> url = "http://images.cocodataset.org/val2017/000000039769.jpg"
        >>> image = Image.open(requests.get(url, stream=True).raw)

        >>> image_processor = AutoImageProcessor.from_pretrained("hustvl/yolos-tiny")
        >>> model = AutoModelForObjectDetection.from_pretrained("hustvl/yolos-tiny")

        >>> inputs = image_processor(images=image, return_tensors="pt")
        >>> outputs = model(**inputs)

        >>> # convert outputs (bounding boxes and class logits) to Pascal VOC format (xmin, ymin, xmax, ymax)
        >>> target_sizes = torch.tensor([image.size[::-1]])
        >>> results = image_processor.post_process_object_detection(outputs, threshold=0.9, target_sizes=target_sizes)[
        ...     0
        ... ]

        >>> for score, label, box in zip(results["scores"], results["labels"], results["boxes"]):
        ...     box = [round(i, 2) for i in box.tolist()]
        ...     print(
        ...         f"Detected {model.config.id2label[label.item()]} with confidence "
        ...         f"{round(score.item(), 3)} at location {box}"
        ...     )
        Detected remote with confidence 0.991 at location [46.48, 72.78, 178.98, 119.3]
        Detected remote with confidence 0.908 at location [336.48, 79.27, 368.23, 192.36]
        Detected cat with confidence 0.934 at location [337.18, 18.06, 638.14, 373.09]
        Detected cat with confidence 0.979 at location [10.93, 53.74, 313.41, 470.67]
        Detected remote with confidence 0.974 at location [41.63, 72.23, 178.09, 119.99]
        ```"""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # First, send pixel values through the YOLOS base model to obtain hidden states
        outputs = self.vit(
            pixel_values,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )

        sequence_output = outputs[0]

        # Take the final hidden states of the detection tokens
        sequence_output = sequence_output[:, -self.config.num_detection_tokens :, :]

        # Class logits + predicted bounding boxes
        logits = self.class_labels_classifier(sequence_output)
        pred_boxes = self.bbox_predictor(sequence_output).sigmoid()

        loss, loss_dict, auxiliary_outputs = None, None, None
        if labels is not None:
            outputs_class, outputs_coord = None, None
            if self.config.auxiliary_loss:
                intermediate = outputs.intermediate_hidden_states if return_dict else outputs[4]
                outputs_class = self.class_labels_classifier(intermediate)
                outputs_coord = self.bbox_predictor(intermediate).sigmoid()
            loss, loss_dict, auxiliary_outputs = self.loss_function(
                logits, labels, self.device, pred_boxes, self.config, outputs_class, outputs_coord
            )

        if not return_dict:
            if auxiliary_outputs is not None:
                output = (logits, pred_boxes) + auxiliary_outputs + outputs
            else:
                output = (logits, pred_boxes) + outputs
            return ((loss, loss_dict) + output) if loss is not None else output

        return YolosObjectDetectionOutput(
            loss=loss,
            loss_dict=loss_dict,
            logits=logits,
            pred_boxes=pred_boxes,
            auxiliary_outputs=auxiliary_outputs,
            last_hidden_state=outputs.last_hidden_state,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )


__all__ = ["YolosForObjectDetection", "YolosModel", "YolosPreTrainedModel"]
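
# Minimal usage sketch (illustrative only, not part of the library module):
# running the bare `YolosModel` encoder to inspect the token layout built above
# (1 [CLS] token + patch tokens + `num_detection_tokens` detection tokens).
# The checkpoint name and image URL are assumptions for the example.
#
#     from transformers import AutoImageProcessor, YolosModel
#     from PIL import Image
#     import requests, torch
#
#     url = "http://images.cocodataset.org/val2017/000000039769.jpg"
#     image = Image.open(requests.get(url, stream=True).raw)
#
#     processor = AutoImageProcessor.from_pretrained("hustvl/yolos-tiny")
#     model = YolosModel.from_pretrained("hustvl/yolos-tiny")
#
#     inputs = processor(images=image, return_tensors="pt")
#     with torch.no_grad():
#         outputs = model(**inputs)
#
#     # The detection-token features are the last `num_detection_tokens`
#     # positions of `last_hidden_state`; `YolosForObjectDetection` feeds
#     # exactly this slice to its classification and box heads.
#     det_tokens = outputs.last_hidden_state[:, -model.config.num_detection_tokens :, :]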