
import math
from collections.abc import Iterator
from typing import TypeVar

import torch
import torch.distributed as dist
from torch.utils.data.dataset import Dataset
from torch.utils.data.sampler import Sampler


__all__ = ["DistributedSampler"]


_T_co = TypeVar("_T_co", covariant=True)


class DistributedSampler(Sampler[_T_co]):
    r"""Sampler that restricts data loading to a subset of the dataset.

    It is especially useful in conjunction with
    :class:`torch.nn.parallel.DistributedDataParallel`. In such a case, each
    process can pass a :class:`~torch.utils.data.DistributedSampler` instance as a
    :class:`~torch.utils.data.DataLoader` sampler, and load a subset of the
    original dataset that is exclusive to it.

    .. note::
        The dataset is assumed to be of constant size, and any instance of it
        is assumed to always return the same elements in the same order.

    Args:
        dataset: Dataset used for sampling.
        num_replicas (int, optional): Number of processes participating in
            distributed training. By default, :attr:`world_size` is retrieved from the
            current distributed group.
        rank (int, optional): Rank of the current process within :attr:`num_replicas`.
            By default, :attr:`rank` is retrieved from the current distributed
            group.
        shuffle (bool, optional): If ``True`` (default), the sampler will
            shuffle the indices.
        seed (int, optional): random seed used to shuffle the sampler if
            :attr:`shuffle=True`. This number should be identical across all
            processes in the distributed group. Default: ``0``.
        drop_last (bool, optional): If ``True``, then the sampler will drop the
            tail of the data to make it evenly divisible across the number of
            replicas. If ``False``, the sampler will add extra indices to make
            the data evenly divisible across the replicas. Default: ``False``.
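
    For example, with a dataset of 10 samples and ``num_replicas=3`` (a
    worked sketch of the arithmetic): ``drop_last=False`` pads the index
    list to 12 entries by reusing its first two indices, so each replica
    draws 4 samples per epoch, while ``drop_last=True`` truncates the list
    to 9 entries, so each replica draws 3.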

    .. warning::
        In distributed mode, calling the :meth:`set_epoch` method at
        the beginning of each epoch **before** creating the :class:`DataLoader` iterator
        is necessary to make shuffling work properly across multiple epochs. Otherwise,
        the same ordering will always be used.

    Example::

        >>> # xdoctest: +SKIP
        >>> sampler = DistributedSampler(dataset) if is_distributed else None
        >>> loader = DataLoader(dataset, shuffle=(sampler is None),
        ...                     sampler=sampler)
        >>> for epoch in range(start_epoch, n_epochs):
        ...     if is_distributed:
        ...         sampler.set_epoch(epoch)
        ...     train(loader)
    Ndatasetnum_replicasrankshuffleseed	drop_lastreturnc                    |3t        j                         st        d      t        j                         }|3t        j                         st        d      t        j                         }||k\  s|dk  rt        d| d|dz
   d      || _        || _        || _        d| _	        || _
        | j                  rmt        | j                        | j                  z  dk7  rHt        j                  t        | j                        | j                  z
  | j                  z        | _        n:t        j                  t        | j                        | j                  z        | _        | j                  | j                  z  | _        || _        || _        y )Nz,Requires distributed package to be availabler   zInvalid rank z%, rank should be in the interval [0,    ])distis_availableRuntimeErrorget_world_sizeget_rank
ValueErrorr   r   r   epochr   lenmathceilnum_samples
total_sizer   r   )selfr   r   r   r   r   r   s          V/var/www/html/engine/venv/lib/python3.12/site-packages/torch/utils/data/distributed.py__init__zDistributedSampler.__init__B   sZ    $$&"#QRR..0L<$$&"#QRR==?D<4!8v%J<Z[K[J\\]^  (	
" >>c$,,/$2C2CCqH  $yyT\\"T%6%66$:K:KK D  $yyT\\):T=N=N)NOD**T->->>	    c                    | j                   rut        j                         }|j                  | j                  | j
                  z          t        j                  t        | j                        |      j                         }n't        t        t        | j                                    }| j                  sZ| j                  t        |      z
  }|t        |      k  r	||d | z  }n:||t        j                  |t        |      z        z  d | z  }n|d | j                   }t        |      | j                  k7  r%t!        dt        |       d| j                   d      || j"                  | j                  | j$                     }t        |      | j&                  k7  r%t!        dt        |       d| j&                   d      t)        |      S )N)	generatorzNumber of indices (z) does not match total_size ()zNumber of subsampled indices (z) does not match num_samples ()r   torch	Generatormanual_seedr   r   randpermr   r   tolistlistranger   r    r   r   AssertionErrorr   r   r   iter)r!   gindicespadding_sizes       r"   __iter__zDistributedSampler.__iter__k   s   <<!AMM$))djj01nnS%6!DKKMG5T\\!234G~~??S\9Ls7|+7=L11Gdiis7|0K&LL!\ 
 /0Gw<4??* %c'l^3PQUQ`Q`Paabc 
 $))doo8I8IIJw<4+++ 0W>\]a]m]m\nnop 
 G}r$   c                     | j                   S )N)r   )r!   s    r"   __len__zDistributedSampler.__len__   s    r$   r   c                     || _         y)a1  
        Set the epoch for this sampler.

        When :attr:`shuffle=True`, this ensures all replicas
        use a different random ordering for each epoch. Otherwise, the next iteration of this
        sampler will yield the same ordering.

        Args:
            epoch (int): Epoch number.
        N)r   )r!   r   s     r"   	set_epochzDistributedSampler.set_epoch   s     
r$   )NNTr   F)__name__
__module____qualname____doc__r   intboolr#   r   r   r4   r6   r8    r$   r"   r   r      s    .f $('' Dj' Dj	'
 ' ' ' 
'R"(5/ "H   s t r$   )r   collections.abcr   typingr   r(   torch.distributeddistributedr   torch.utils.data.datasetr   torch.utils.data.samplerr   __all__r   r   r?   r$   r"   <module>rG      sD     $     , ,  
  	4(L Lr$   
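

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the class above).
# Passing ``num_replicas`` and ``rank`` explicitly sidesteps process-group
# initialization, so the sharding arithmetic can be inspected in a single
# process; the plain list stands in for a real ``Dataset``, since the
# sampler only ever calls ``len()`` on it.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    data = list(range(10))

    # Without shuffling, rank r receives every ``num_replicas``-th index,
    # starting at r: rank 0 -> [0, 2, 4, 6, 8], rank 1 -> [1, 3, 5, 7, 9].
    for rank in range(2):
        sampler = DistributedSampler(data, num_replicas=2, rank=rank, shuffle=False)
        print(f"rank {rank}: {list(sampler)}")

    # With shuffling, ``set_epoch`` reseeds the permutation (seed + epoch),
    # so the ordering changes every epoch yet stays identical across ranks.
    sampler = DistributedSampler(data, num_replicas=2, rank=0, shuffle=True, seed=0)
    for epoch in range(2):
        sampler.set_epoch(epoch)
        print(f"epoch {epoch}: {list(sampler)}")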