
from torch.utils.data import DataLoader

from ..utils import is_torch_xla_available


def tpu_spmd_dataloader(dataloader: DataLoader):
    """Attach an SPMD input-sharding spec to an XLA dataloader, if torch_xla is available."""
    if is_torch_xla_available():
        import torch_xla.distributed.parallel_loader as pl

        assert isinstance(dataloader, pl.MpDeviceLoader), (
            "The dataloader must be a `torch_xla.distributed.parallel_loader.MpDeviceLoader`."
        )

        # This supports PyTorch/XLA FSDP via SPMD: shard the input batch's
        # 0th dimension across the "fsdp" axis of the global mesh.
        import torch_xla.distributed.spmd as xs

        sharding_spec = xs.ShardingSpec(xs.get_global_mesh(), ("fsdp", None))
        dataloader._parallel_loader_kwargs["input_sharding"] = sharding_spec
        return dataloader
    else:
        return dataloader