
# transformers/image_processing_utils_fast.py

from collections.abc import Iterable
from copy import deepcopy
from functools import lru_cache, partial
from typing import Any, Optional, TypedDict, Union

import numpy as np

from .image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from .image_transforms import (
    convert_to_rgb,
    get_resize_output_image_size,
    get_size_with_aspect_ratio,
    group_images_by_shape,
    reorder_images,
)
from .image_utils import (
    ChannelDimension,
    ImageInput,
    ImageType,
    SizeDict,
    get_image_size,
    get_image_size_for_max_height_width,
    get_image_type,
    infer_channel_dimension_format,
    make_flat_list_of_images,
    validate_kwargs,
    validate_preprocess_arguments,
)
from .processing_utils import Unpack
from .utils import (
    TensorType,
    auto_docstring,
    is_torch_available,
    is_torchvision_available,
    is_vision_available,
    logging,
)
from .utils.import_utils import is_rocm_platform


if is_vision_available():
    from .image_utils import PILImageResampling

if is_torch_available():
    import torch

if is_torchvision_available():
    from torchvision.transforms.v2 import functional as F

    from .image_utils import pil_torch_interpolation_mapping
else:
    pil_torch_interpolation_mapping = None


logger = logging.get_logger(__name__)


@lru_cache(maxsize=10)
def validate_fast_preprocess_arguments(
    do_rescale: Optional[bool] = None,
    rescale_factor: Optional[float] = None,
    do_normalize: Optional[bool] = None,
    image_mean: Optional[Union[float, list[float]]] = None,
    image_std: Optional[Union[float, list[float]]] = None,
    do_center_crop: Optional[bool] = None,
    crop_size: Optional[SizeDict] = None,
    do_resize: Optional[bool] = None,
    size: Optional[SizeDict] = None,
    interpolation: Optional["F.InterpolationMode"] = None,
    return_tensors: Optional[Union[str, TensorType]] = None,
    data_format: Optional[ChannelDimension] = ChannelDimension.FIRST,
):
    """
    Checks validity of typically used arguments in an `ImageProcessorFast` `preprocess` method.
    Raises `ValueError` if arguments incompatibility is caught.
    """
    validate_preprocess_arguments(
        do_rescale=do_rescale,
        rescale_factor=rescale_factor,
        do_normalize=do_normalize,
        image_mean=image_mean,
        image_std=image_std,
        do_center_crop=do_center_crop,
        crop_size=crop_size,
        do_resize=do_resize,
        size=size,
        resample=interpolation,
    )
    # Extra checks specific to the fast (torch/torchvision-backed) processors
    if return_tensors is not None and return_tensors != "pt":
        raise ValueError("Only returning PyTorch tensors is currently supported.")

    if data_format != ChannelDimension.FIRST:
        raise ValueError("Only channel first data format is currently supported.")


def safe_squeeze(tensor: "torch.Tensor", axis: Optional[int] = None) -> "torch.Tensor":
    """
    Squeezes a tensor, but only if the axis specified has dim 1.
    """
    if axis is None:
        return tensor.squeeze()

    try:
        return tensor.squeeze(axis=axis)
    except ValueError:
        return tensor


def max_across_indices(values: Iterable[Any]) -> list[Any]:
    """
    Return the maximum value across all indices of an iterable of values.
    """
    return [max(values_i) for values_i in zip(*values)]


def get_max_height_width(images: list["torch.Tensor"]) -> tuple[int, int]:
    """
    Get the maximum height and width across all images in a batch.
    """
    _, max_height, max_width = max_across_indices([img.shape for img in images])
    return (max_height, max_width)


def divide_to_patches(
    image: Union[np.ndarray, "torch.Tensor"], patch_size: int
) -> list[Union[np.ndarray, "torch.Tensor"]]:
    """
    Divides an image into patches of a specified size.

    Args:
        image (`Union[np.ndarray, "torch.Tensor"]`):
            The input image.
        patch_size (`int`):
            The size of each patch.
    Returns:
        list: A list of `Union[np.ndarray, "torch.Tensor"]` representing the patches.
    """
    patches = []
    height, width = get_image_size(image, channel_dim=ChannelDimension.FIRST)
    for i in range(0, height, patch_size):
        for j in range(0, width, patch_size):
            patch = image[:, i : i + patch_size, j : j + patch_size]
            patches.append(patch)

    return patches
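
# Usage sketch for the helpers above (illustrative shapes only, not part of the library API;
# kept as a comment so the module has no import-time side effects):
#
#   image = torch.zeros(3, 224, 224, dtype=torch.uint8)            # channels-first tensor
#   patches = divide_to_patches(image, patch_size=112)             # 4 patches of shape (3, 112, 112)
#   batch = [torch.zeros(3, 200, 300), torch.zeros(3, 240, 180)]
#   get_max_height_width(batch)                                    # (240, 300)
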
class DefaultFastImageProcessorKwargs(TypedDict, total=False):
    do_resize: Optional[bool]
    size: Optional[dict[str, int]]
    default_to_square: Optional[bool]
    resample: Optional[Union["PILImageResampling", "F.InterpolationMode"]]
    do_center_crop: Optional[bool]
    crop_size: Optional[dict[str, int]]
    do_rescale: Optional[bool]
    rescale_factor: Optional[Union[int, float]]
    do_normalize: Optional[bool]
    image_mean: Optional[Union[float, list[float]]]
    image_std: Optional[Union[float, list[float]]]
    do_pad: Optional[bool]
    pad_size: Optional[dict[str, int]]
    do_convert_rgb: Optional[bool]
    return_tensors: Optional[Union[str, TensorType]]
    data_format: Optional[ChannelDimension]
    input_data_format: Optional[Union[str, ChannelDimension]]
    device: Optional["torch.device"]
    disable_grouping: Optional[bool]


@auto_docstring
class BaseImageProcessorFast(BaseImageProcessor):
    resample = None
    image_mean = None
    image_std = None
    size = None
    default_to_square = True
    crop_size = None
    do_resize = None
    do_center_crop = None
    do_rescale = None
    rescale_factor = 1 / 255
    do_normalize = None
    do_pad = None
    pad_size = None
    do_convert_rgb = None
    return_tensors = None
    data_format = ChannelDimension.FIRST
    input_data_format = None
    device = None
    disable_grouping = None
    model_input_names = ["pixel_values"]
    valid_kwargs = DefaultFastImageProcessorKwargs
    unused_kwargs = None

    def __init__(self, **kwargs: Unpack[DefaultFastImageProcessorKwargs]):
        super().__init__(**kwargs)
        kwargs = self.filter_out_unused_kwargs(kwargs)
        size = kwargs.pop("size", self.size)
        self.size = (
            get_size_dict(size=size, default_to_square=kwargs.pop("default_to_square", self.default_to_square))
            if size is not None
            else None
        )
        crop_size = kwargs.pop("crop_size", self.crop_size)
        self.crop_size = get_size_dict(crop_size, param_name="crop_size") if crop_size is not None else None
        pad_size = kwargs.pop("pad_size", self.pad_size)
        self.pad_size = get_size_dict(pad_size, param_name="pad_size") if pad_size is not None else None

        for key in self.valid_kwargs.__annotations__:
            kwarg = kwargs.pop(key, None)
            if kwarg is not None:
                setattr(self, key, kwarg)
            else:
                setattr(self, key, deepcopy(getattr(self, key, None)))

        self._valid_kwargs_names = list(self.valid_kwargs.__annotations__.keys())

    @property
    def is_fast(self) -> bool:
        """
        `bool`: Whether or not this image processor is a fast processor (backed by PyTorch and TorchVision).
        """
        return True

    def pad(
        self,
        images: "torch.Tensor",
        pad_size: SizeDict = None,
        fill_value: Optional[int] = 0,
        padding_mode: Optional[str] = "constant",
        return_mask: bool = False,
        disable_grouping: Optional[bool] = False,
        **kwargs,
    ) -> "torch.Tensor":
        """
        Pads images to `(pad_size["height"], pad_size["width"])` or to the largest size in the batch.

        Args:
            images (`torch.Tensor`):
                Images to pad.
            pad_size (`SizeDict`, *optional*):
                Dictionary in the format `{"height": int, "width": int}` specifying the size of the output image.
            fill_value (`int`, *optional*, defaults to `0`):
                The constant value used to fill the padded area.
            padding_mode (`str`, *optional*, defaults to `"constant"`):
                The padding mode to use. Can be any of the modes supported by
                `torch.nn.functional.pad` (e.g. constant, reflection, replication).
            return_mask (`bool`, *optional*, defaults to `False`):
                Whether to return a pixel mask to denote padded regions.
            disable_grouping (`bool`, *optional*, defaults to `False`):
                Whether to disable grouping of images by size.

        Returns:
            `torch.Tensor`: The padded images (and the pixel masks if `return_mask=True`).
        """
        if pad_size is not None:
            if not (pad_size.height and pad_size.width):
                raise ValueError(f"Pad size must contain 'height' and 'width' keys only. Got pad_size={pad_size}.")
            pad_size = (pad_size.height, pad_size.width)
        else:
            pad_size = get_max_height_width(images)

        grouped_images, grouped_images_index = group_images_by_shape(images, disable_grouping=disable_grouping)
        processed_images_grouped = {}
        processed_masks_grouped = {}
        for shape, stacked_images in grouped_images.items():
            image_size = stacked_images.shape[-2:]
            padding_height = pad_size[0] - image_size[0]
            padding_width = pad_size[1] - image_size[1]
            if padding_height < 0 or padding_width < 0:
                raise ValueError(
                    "Padding dimensions are negative. Please make sure that the `pad_size` is larger than the "
                    f"image size. Got pad_size={pad_size}, image_size={image_size}."
                )
            if image_size != pad_size:
                # pad on the bottom and right only
                padding = (0, 0, padding_width, padding_height)
                stacked_images = F.pad(stacked_images, padding, fill=fill_value, padding_mode=padding_mode)
            processed_images_grouped[shape] = stacked_images

            if return_mask:
                stacked_masks = torch.zeros_like(stacked_images[:, 0], dtype=torch.int64)
                stacked_masks[..., : image_size[0], : image_size[1]] = 1
                processed_masks_grouped[shape] = stacked_masks

        processed_images = reorder_images(processed_images_grouped, grouped_images_index)
        if return_mask:
            processed_masks = reorder_images(processed_masks_grouped, grouped_images_index)
            return processed_images, processed_masks

        return processed_images

    def resize(
        self,
        image: "torch.Tensor",
        size: SizeDict,
        interpolation: Optional["F.InterpolationMode"] = None,
        antialias: bool = True,
        **kwargs,
    ) -> "torch.Tensor":
        """
        Resize an image to `(size["height"], size["width"])`.

        Args:
            image (`torch.Tensor`):
                Image to resize.
            size (`SizeDict`):
                Dictionary in the format `{"height": int, "width": int}` specifying the size of the output image.
            interpolation (`InterpolationMode`, *optional*, defaults to `InterpolationMode.BILINEAR`):
                `InterpolationMode` filter to use when resizing the image e.g. `InterpolationMode.BICUBIC`.

        Returns:
            `torch.Tensor`: The resized image.
        """
        interpolation = interpolation if interpolation is not None else F.InterpolationMode.BILINEAR
        if size.shortest_edge and size.longest_edge:
            # Resize so that the shortest edge (capped by the longest edge) matches the requested size,
            # while preserving the aspect ratio of the original image.
            new_size = get_size_with_aspect_ratio(image.size()[-2:], size.shortest_edge, size.longest_edge)
        elif size.shortest_edge:
            new_size = get_resize_output_image_size(
                image,
                size=size.shortest_edge,
                default_to_square=False,
                input_data_format=ChannelDimension.FIRST,
            )
        elif size.max_height and size.max_width:
            new_size = get_image_size_for_max_height_width(image.size()[-2:], size.max_height, size.max_width)
        elif size.height and size.width:
            new_size = (size.height, size.width)
        else:
            raise ValueError(
                "Size must contain 'height' and 'width' keys, or 'max_height' and 'max_width', or 'shortest_edge'"
                f" key. Got {size}."
            )
        if torch.compiler.is_compiling() or is_rocm_platform():
            return self.compile_friendly_resize(image, new_size, interpolation=interpolation, antialias=antialias)
        return F.resize(image, new_size, interpolation=interpolation, antialias=antialias)

    @staticmethod
    def compile_friendly_resize(
        image: "torch.Tensor",
        new_size: tuple[int, int],
        interpolation: Optional["F.InterpolationMode"] = None,
        antialias: bool = True,
    ) -> "torch.Tensor":
        """
        A wrapper around `F.resize` so that it is compatible with torch.compile when the image is a uint8 tensor.
        """
        if image.dtype == torch.uint8:
            image = image.float() / 255
            image = F.resize(image, new_size, interpolation=interpolation, antialias=antialias)
            image = image * 255
            image = torch.where(image > 255, 255, image)
            image = torch.where(image < 0, 0, image)
            image = image.round().to(torch.uint8)
        else:
            image = F.resize(image, new_size, interpolation=interpolation, antialias=antialias)
        return image

    def rescale(self, image: "torch.Tensor", scale: float, **kwargs) -> "torch.Tensor":
        """
        Rescale an image by a scale factor. image = image * scale.

        Args:
            image (`torch.Tensor`):
                Image to rescale.
            scale (`float`):
                The scaling factor to rescale pixel values by.

        Returns:
            `torch.Tensor`: The rescaled image.
        """
        return image * scale

    def normalize(
        self,
        image: "torch.Tensor",
        mean: Union[float, Iterable[float]],
        std: Union[float, Iterable[float]],
        **kwargs,
    ) -> "torch.Tensor":
        """
        Normalize an image. image = (image - image_mean) / image_std.

        Args:
            image (`torch.Tensor`):
                Image to normalize.
            mean (`torch.Tensor`, `float` or `Iterable[float]`):
                Image mean to use for normalization.
            std (`torch.Tensor`, `float` or `Iterable[float]`):
                Image standard deviation to use for normalization.

        Returns:
            `torch.Tensor`: The normalized image.
        """
        return F.normalize(image, mean, std)

    @lru_cache(maxsize=10)
    def _fuse_mean_std_and_rescale_factor(
        self,
        do_normalize: Optional[bool] = None,
        image_mean: Optional[Union[float, list[float]]] = None,
        image_std: Optional[Union[float, list[float]]] = None,
        do_rescale: Optional[bool] = None,
        rescale_factor: Optional[float] = None,
        device: Optional["torch.device"] = None,
    ) -> tuple:
        if do_rescale and do_normalize:
            # Fuse rescale and normalize into a single normalization step
            image_mean = torch.tensor(image_mean, device=device) * (1.0 / rescale_factor)
            image_std = torch.tensor(image_std, device=device) * (1.0 / rescale_factor)
            do_rescale = False
        return image_mean, image_std, do_rescale

    def rescale_and_normalize(
        self,
        images: "torch.Tensor",
        do_rescale: bool,
        rescale_factor: float,
        do_normalize: bool,
        image_mean: Union[float, list[float]],
        image_std: Union[float, list[float]],
    ) -> "torch.Tensor":
        """
        Rescale and normalize images.
        """
        image_mean, image_std, do_rescale = self._fuse_mean_std_and_rescale_factor(
            do_normalize=do_normalize,
            image_mean=image_mean,
            image_std=image_std,
            do_rescale=do_rescale,
            rescale_factor=rescale_factor,
            device=images.device,
        )
        if do_normalize:
            images = self.normalize(images.to(dtype=torch.float32), image_mean, image_std)
        elif do_rescale:
            images = images * rescale_factor

        return images

    def center_crop(
        self,
        image: "torch.Tensor",
        size: dict[str, int],
        **kwargs,
    ) -> "torch.Tensor":
        """
        Note: override torchvision's center_crop to have the same behavior as the slow processor.
        Center crop an image to `(size["height"], size["width"])`. If the input size is smaller than `crop_size` along
        any edge, the image is padded with 0's and then center cropped.

        Args:
            image (`"torch.Tensor"`):
                Image to center crop.
            size (`dict[str, int]`):
                Size of the output image.

        Returns:
            `torch.Tensor`: The center cropped image.
        """
        if size.height is None or size.width is None:
            raise ValueError(f"The size dictionary must have keys 'height' and 'width'. Got {size.keys()}")
        image_height, image_width = image.shape[-2:]
        crop_height, crop_width = size.height, size.width

        if crop_width > image_width or crop_height > image_height:
            padding_ltrb = [
                (crop_width - image_width) // 2 if crop_width > image_width else 0,
                (crop_height - image_height) // 2 if crop_height > image_height else 0,
                (crop_width - image_width + 1) // 2 if crop_width > image_width else 0,
                (crop_height - image_height + 1) // 2 if crop_height > image_height else 0,
            ]
            image = F.pad(image, padding_ltrb, fill=0)  # PIL uses fill value 0
            image_height, image_width = image.shape[-2:]
            if crop_width == image_width and crop_height == image_height:
                return image

        crop_top = int((image_height - crop_height) / 2.0)
        crop_left = int((image_width - crop_width) / 2.0)
        return F.crop(image, crop_top, crop_left, crop_height, crop_width)

    def convert_to_rgb(
        self,
        image: ImageInput,
    ) -> ImageInput:
        """
        Converts an image to RGB format. Only converts if the image is of type PIL.Image.Image, otherwise returns the
        image as is.

        Args:
            image (ImageInput):
                The image to convert.

        Returns:
            ImageInput: The converted image.
        """
        return convert_to_rgb(image)

    def filter_out_unused_kwargs(self, kwargs: dict):
        """
        Filter out the unused kwargs from the kwargs dictionary.
        """
        if self.unused_kwargs is None:
            return kwargs

        for kwarg_name in self.unused_kwargs:
            if kwarg_name in kwargs:
                logger.warning_once(f"This processor does not use the `{kwarg_name}` parameter. It will be ignored.")
                kwargs.pop(kwarg_name)

        return kwargs

    def _prepare_images_structure(
        self,
        images: ImageInput,
        expected_ndims: int = 3,
    ) -> ImageInput:
        """
        Prepare the images structure for processing.

        Args:
            images (`ImageInput`):
                The input images to process.

        Returns:
            `ImageInput`: The images with a valid nesting.
        """
        images = self.fetch_images(images)
        return make_flat_list_of_images(images, expected_ndims=expected_ndims)

    def _process_image(
        self,
        image: ImageInput,
        do_convert_rgb: Optional[bool] = None,
        input_data_format: Optional[Union[str, ChannelDimension]] = None,
        device: Optional["torch.device"] = None,
    ) -> "torch.Tensor":
        image_type = get_image_type(image)
        if image_type not in [ImageType.PIL, ImageType.TORCH, ImageType.NUMPY]:
            raise ValueError(f"Unsupported input image type {image_type}")

        if do_convert_rgb:
            image = self.convert_to_rgb(image)

        if image_type == ImageType.PIL:
            image = F.pil_to_tensor(image)
        elif image_type == ImageType.NUMPY:
            # not using F.to_tensor as it doesn't handle (C, H, W) numpy arrays
            image = torch.from_numpy(image).contiguous()

        if image.ndim == 2:
            image = image.unsqueeze(0)

        # Infer the channel dimension format if not provided
        if input_data_format is None:
            input_data_format = infer_channel_dimension_format(image)
        if input_data_format == ChannelDimension.LAST:
            # Force channels-first, which is what torchvision operators expect
            image = image.permute(2, 0, 1).contiguous()

        # Now that we have a torch tensor, we can move it to the requested device
        if device is not None:
            image = image.to(device)

        return image

    def _prepare_image_like_inputs(
        self,
        images: ImageInput,
        do_convert_rgb: Optional[bool] = None,
        input_data_format: Optional[Union[str, ChannelDimension]] = None,
        device: Optional["torch.device"] = None,
        expected_ndims: int = 3,
    ) -> list["torch.Tensor"]:
        """
        Prepare image-like inputs for processing.

        Args:
            images (`ImageInput`):
                The image-like inputs to process.
            do_convert_rgb (`bool`, *optional*):
                Whether to convert the images to RGB.
            input_data_format (`str` or `ChannelDimension`, *optional*):
                The input data format of the images.
            device (`torch.device`, *optional*):
                The device to put the processed images on.
            expected_ndims (`int`, *optional*):
                The expected number of dimensions for the images. (can be 2 for segmentation maps etc.)

        Returns:
            List[`torch.Tensor`]: The processed images.
        """
        images = self._prepare_images_structure(images, expected_ndims=expected_ndims)
        process_image_partial = partial(
            self._process_image, do_convert_rgb=do_convert_rgb, input_data_format=input_data_format, device=device
        )
        # Check if we have a nested structure, assuming the nesting is consistent
        has_nested_structure = len(images) > 0 and isinstance(images[0], (list, tuple))
        if has_nested_structure:
            processed_images = [[process_image_partial(img) for img in nested_list] for nested_list in images]
        else:
            processed_images = [process_image_partial(img) for img in images]

        return processed_images

    def _further_process_kwargs(
        self,
        size: Optional[SizeDict] = None,
        crop_size: Optional[SizeDict] = None,
        pad_size: Optional[SizeDict] = None,
        default_to_square: Optional[bool] = None,
        image_mean: Optional[Union[float, list[float]]] = None,
        image_std: Optional[Union[float, list[float]]] = None,
        data_format: Optional[ChannelDimension] = None,
        **kwargs,
    ) -> dict:
        """
        Update kwargs that need further processing before being validated.
        Can be overridden by subclasses to customize the processing of kwargs.
        """
        if kwargs is None:
            kwargs = {}
        if size is not None:
            size = SizeDict(**get_size_dict(size=size, default_to_square=default_to_square))
        if crop_size is not None:
            crop_size = SizeDict(**get_size_dict(crop_size, param_name="crop_size"))
        if pad_size is not None:
            pad_size = SizeDict(**get_size_dict(pad_size, param_name="pad_size"))
        if isinstance(image_mean, list):
            image_mean = tuple(image_mean)
        if isinstance(image_std, list):
            image_std = tuple(image_std)
        if data_format is None:
            data_format = ChannelDimension.FIRST

        kwargs["size"] = size
        kwargs["crop_size"] = crop_size
        kwargs["pad_size"] = pad_size
        kwargs["default_to_square"] = default_to_square
        kwargs["image_mean"] = image_mean
        kwargs["image_std"] = image_std
        kwargs["data_format"] = data_format

        # torchvision resize uses `interpolation` instead of `resample`
        resample = kwargs.pop("resample")
        kwargs["interpolation"] = (
            pil_torch_interpolation_mapping[resample] if isinstance(resample, (PILImageResampling, int)) else resample
        )

        return kwargs

    def _validate_preprocess_kwargs(
        self,
        do_rescale: Optional[bool] = None,
        rescale_factor: Optional[float] = None,
        do_normalize: Optional[bool] = None,
        image_mean: Optional[Union[float, tuple[float]]] = None,
        image_std: Optional[Union[float, tuple[float]]] = None,
        do_resize: Optional[bool] = None,
        size: Optional[SizeDict] = None,
        do_center_crop: Optional[bool] = None,
        crop_size: Optional[SizeDict] = None,
        interpolation: Optional["F.InterpolationMode"] = None,
        return_tensors: Optional[Union[str, TensorType]] = None,
        data_format: Optional[ChannelDimension] = None,
        **kwargs,
    ):
        """
        Validate the kwargs for the preprocess method.
        """
        validate_fast_preprocess_arguments(
            do_rescale=do_rescale,
            rescale_factor=rescale_factor,
            do_normalize=do_normalize,
            image_mean=image_mean,
            image_std=image_std,
            do_resize=do_resize,
            size=size,
            do_center_crop=do_center_crop,
            crop_size=crop_size,
            interpolation=interpolation,
            return_tensors=return_tensors,
            data_format=data_format,
        )

    def __call__(self, images: ImageInput, *args, **kwargs) -> BatchFeature:
        return self.preprocess(images, *args, **kwargs)

    def preprocess(
        self, images: ImageInput, *args, **kwargs: Unpack[DefaultFastImageProcessorKwargs]
    ) -> BatchFeature:
        validate_kwargs(captured_kwargs=kwargs.keys(), valid_processor_keys=self._valid_kwargs_names)
        # Set default kwargs from self. This ensures that if a kwarg is not provided
        # by the user, it gets its default value from the instance attributes.
        for kwarg_name in self._valid_kwargs_names:
            kwargs.setdefault(kwarg_name, getattr(self, kwarg_name, None))

        # Extract parameters that are only used for preparing the input images
        do_convert_rgb = kwargs.pop("do_convert_rgb")
        input_data_format = kwargs.pop("input_data_format")
        device = kwargs.pop("device")
        # Update kwargs that need further processing before being validated
        kwargs = self._further_process_kwargs(**kwargs)
        # Validate kwargs
        self._validate_preprocess_kwargs(**kwargs)

        # Pop kwargs that are not needed in _preprocess
        kwargs.pop("default_to_square")
        kwargs.pop("data_format")

        return self._preprocess_image_like_inputs(
            images, *args, do_convert_rgb=do_convert_rgb, input_data_format=input_data_format, device=device, **kwargs
        )

    def _preprocess_image_like_inputs(
        self,
        images: ImageInput,
        *args,
        do_convert_rgb: bool,
        input_data_format: ChannelDimension,
        device: Optional[Union[str, "torch.device"]] = None,
        **kwargs: Unpack[DefaultFastImageProcessorKwargs],
    ) -> BatchFeature:
        """
        Preprocess image-like inputs.
        To be overridden by subclasses when image-like inputs other than images should be processed.
        It can be used for segmentation maps, depth maps, etc.
        """
        images = self._prepare_image_like_inputs(
            images=images, do_convert_rgb=do_convert_rgb, input_data_format=input_data_format, device=device
        )
        return self._preprocess(images, *args, **kwargs)

    def _preprocess(
        self,
        images: list["torch.Tensor"],
        do_resize: bool,
        size: SizeDict,
        interpolation: Optional["F.InterpolationMode"],
        do_center_crop: bool,
        crop_size: SizeDict,
        do_rescale: bool,
        rescale_factor: float,
        do_normalize: bool,
        image_mean: Optional[Union[float, list[float]]],
        image_std: Optional[Union[float, list[float]]],
        do_pad: Optional[bool],
        pad_size: Optional[SizeDict],
        disable_grouping: Optional[bool],
        return_tensors: Optional[Union[str, TensorType]],
        **kwargs,
    ) -> BatchFeature:
        # Group images by size for batched resizing
        grouped_images, grouped_images_index = group_images_by_shape(images, disable_grouping=disable_grouping)
        resized_images_grouped = {}
        for shape, stacked_images in grouped_images.items():
            if do_resize:
                stacked_images = self.resize(image=stacked_images, size=size, interpolation=interpolation)
            resized_images_grouped[shape] = stacked_images
        resized_images = reorder_images(resized_images_grouped, grouped_images_index)

        # Group images by size again for further processing: needed in case do_resize is False,
        # or resize returns images with different sizes
        grouped_images, grouped_images_index = group_images_by_shape(resized_images, disable_grouping=disable_grouping)
        processed_images_grouped = {}
        for shape, stacked_images in grouped_images.items():
            if do_center_crop:
                stacked_images = self.center_crop(stacked_images, crop_size)
            # Fused rescale and normalize
            stacked_images = self.rescale_and_normalize(
                stacked_images, do_rescale, rescale_factor, do_normalize, image_mean, image_std
            )
            processed_images_grouped[shape] = stacked_images

        processed_images = reorder_images(processed_images_grouped, grouped_images_index)
        if do_pad:
            processed_images = self.pad(processed_images, pad_size=pad_size, disable_grouping=disable_grouping)

        processed_images = torch.stack(processed_images, dim=0) if return_tensors else processed_images
        return BatchFeature(data={"pixel_values": processed_images}, tensor_type=return_tensors)

    def to_dict(self):
        encoder_dict = super().to_dict()
        encoder_dict.pop("_valid_processor_keys", None)
        encoder_dict.pop("_valid_kwargs_names", None)
        return encoder_dict
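

# End-to-end usage sketch (illustrative only, kept as a comment): a hypothetical subclass
# only declares its defaults; `preprocess` then resizes, rescales and normalizes a batch.
# `DemoImageProcessorFast` and the default values below are assumptions for the example,
# not a real transformers class.
#
#   class DemoImageProcessorFast(BaseImageProcessorFast):
#       resample = PILImageResampling.BILINEAR
#       image_mean = [0.5, 0.5, 0.5]
#       image_std = [0.5, 0.5, 0.5]
#       size = {"height": 224, "width": 224}
#       do_resize = True
#       do_rescale = True
#       do_normalize = True
#
#   processor = DemoImageProcessorFast()
#   batch = processor(
#       [torch.randint(0, 256, (3, 480, 640), dtype=torch.uint8)],
#       return_tensors="pt",
#   )
#   batch["pixel_values"].shape  # torch.Size([1, 3, 224, 224])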