
    i!              	           d dl mZ d dlZd dlZd dlmZ d dlmZ	 ddgZ
 G d dej                        Zej                  j                  ed       ddd	d
ede	dz  dedz  ddfdZy)    )AnyN)
_to_dlpack)DeviceDLDeviceTypefrom_dlpackc                   H    e Zd ZdZdZdZdZdZdZdZ	dZ
d	Zd
ZdZdZdZdZdZy)r   )   )   )   )   )   )   )	   )
   )   )   )   )   )   )   )   N)__name__
__module____qualname__kDLCPUkDLCUDAkDLCUDAHost	kDLOpenCL	kDLVulkankDLMetalkDLVPIkDLROCMkDLROCMHost	kDLExtDevkDLCUDAManaged	kDLOneAPI	kDLWebGPU
kDLHexagonkDLMAIA     L/var/www/html/engine/venv/lib/python3.12/site-packages/torch/utils/dlpack.pyr   r      sM    FGKIIHFGKINIIJGr+   a  to_dlpack(tensor) -> PyCapsule

Returns an opaque object (a "DLPack capsule") representing the tensor.

.. note::
  ``to_dlpack`` is a legacy DLPack interface. The capsule it returns
  cannot be used for anything in Python other than being passed to
  ``from_dlpack``. The more idiomatic use of DLPack is to call
  ``from_dlpack`` directly on the tensor object - this works when that
  object has a ``__dlpack__`` method, which PyTorch and most other
  libraries now provide.

.. warning::
  Only call ``from_dlpack`` once per capsule produced with ``to_dlpack``.
  Behavior when a capsule is consumed multiple times is undefined.

Args:
    tensor: a tensor to be exported

The DLPack capsule shares the tensor's memory.
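
Example (an illustrative round trip; memory stays shared across it)::

    >>> t = torch.arange(4)
    >>> capsule = torch.utils.dlpack.to_dlpack(t)
    >>> t2 = torch.utils.dlpack.from_dlpack(capsule)  # consumes the capsule
    >>> t2[1] = -1  # shares memory with the original tensor
    >>> t
    tensor([ 0, -1,  2,  3])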
""",
)


def from_dlpack(
    ext_tensor: Any,
    *,
    device: _Device | None = None,
    copy: bool | None = None,
) -> "torch.Tensor":
    """from_dlpack(ext_tensor) -> Tensor

    Converts a tensor from an external library into a ``torch.Tensor``.

    The returned PyTorch tensor will share memory with the input tensor
    (which may have come from another library). Note that in-place operations
    will therefore also affect the data of the input tensor. This may lead to
    unexpected issues (e.g., other libraries may have read-only flags or
    immutable data structures), so the user should only do this if they know
    for sure that this is fine.

    Args:
        ext_tensor (object with ``__dlpack__`` attribute, or a DLPack capsule):
            The tensor or DLPack capsule to convert.

            If ``ext_tensor`` is a tensor (or ndarray) object, it must support
            the ``__dlpack__`` protocol (i.e., have a ``ext_tensor.__dlpack__``
            method). Otherwise ``ext_tensor`` may be a DLPack capsule, which is
            an opaque ``PyCapsule`` instance, typically produced by a
            ``to_dlpack`` function or method.

        device (torch.device or str or None): An optional PyTorch device
            specifying where to place the new tensor. If None (default), the
            new tensor will be on the same device as ``ext_tensor``.

        copy (bool or None): An optional boolean indicating whether to copy the
            data of ``ext_tensor``. If True, a copy is always made; if False, no
            copy is made (and an error is raised if the conversion cannot be done
            without one); if None (default), PyTorch copies only when necessary.

    Examples::

        >>> import torch.utils.dlpack
        >>> t = torch.arange(4)

        # Convert a tensor directly (supported in PyTorch >= 1.10)
        >>> t2 = torch.from_dlpack(t)
        >>> t2[:2] = -1  # show that memory is shared
        >>> t2
        tensor([-1, -1,  2,  3])
        >>> t
        tensor([-1, -1,  2,  3])

        # The old-style DLPack usage, with an intermediate capsule object
        >>> capsule = torch.utils.dlpack.to_dlpack(t)
        >>> capsule
        <capsule object "dltensor" at ...>
        >>> t3 = torch.from_dlpack(capsule)
        >>> t3
        tensor([-1, -1,  2,  3])
        >>> t3[0] = -9  # now we're sharing memory between 3 tensors
        >>> t3
        tensor([-9, -1,  2,  3])
        >>> t2
        tensor([-9, -1,  2,  3])
        >>> t
        tensor([-9, -1,  2,  3])
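
        # With the ``device``/``copy`` keywords (an illustrative sketch; if the
        # producer does not accept them, PyTorch copies/moves the result itself)
        >>> t4 = torch.utils.dlpack.from_dlpack(t, copy=True)
        >>> t4[0] = 0  # no longer shares memory with ``t``
        >>> t
        tensor([-9, -1,  2,  3])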

    
    """
    if hasattr(ext_tensor, "__dlpack__"):
        kwargs: dict[str, Any] = {}
        # Ask the producer for a versioned (DLPack 1.0) capsule first.
        kwargs["max_version"] = (1, 0)

        requested_copy = copy
        producer_handled_copy = True
        cross_device_transfer = False

        if copy is not None:
            kwargs["copy"] = copy

        ext_device = ext_tensor.__dlpack_device__()

        if device is not None:
            if isinstance(device, str):
                device = torch.device(device)
            if not isinstance(device, torch.device):
                raise AssertionError(
                    f"from_dlpack: unsupported device type: {type(device)}"
                )
            target_dl_device = torch._C._torchDeviceToDLDevice(device)
            cross_device_transfer = target_dl_device != ext_device
            if cross_device_transfer:
                kwargs["dl_device"] = target_dl_device
            if cross_device_transfer and copy is False:
                raise ValueError(
                    f"cannot move DLPack tensor from device {ext_device} to "
                    f"{target_dl_device} without copying. Set copy=None or copy=True."
                )

        if ext_device[0] in (DLDeviceType.kDLCUDA, DLDeviceType.kDLROCM):
            stream = torch.cuda.current_stream(f"cuda:{ext_device[1]}")
            # cuda_stream is the raw pointer of the current stream. The array
            # API requires the CUDA legacy default stream to be passed as 1
            # (PyTorch's default stream has cuda_stream == 0), while ROCm uses
            # the pointer value directly.
            is_cuda = ext_device[0] == DLDeviceType.kDLCUDA
            stream_ptr = (
                1 if is_cuda and stream.cuda_stream == 0 else stream.cuda_stream
            )
            kwargs["stream"] = stream_ptr

        # Older producers may not understand the newer keyword arguments, so
        # retry with progressively fewer of them on TypeError.
        dlpack = None
        try:
            dlpack = ext_tensor.__dlpack__(**kwargs)
        except TypeError:
            pass

        if dlpack is None:
            kwargs.pop("max_version", None)
            try:
                dlpack = ext_tensor.__dlpack__(**kwargs)
            except TypeError:
                pass

        if dlpack is None:
            # The producer cannot honor ``copy``; PyTorch handles it below.
            kwargs.pop("copy", None)
            producer_handled_copy = False
            try:
                dlpack = ext_tensor.__dlpack__(**kwargs)
            except TypeError:
                pass

        if dlpack is None:
            # Last resort: drop ``dl_device`` and move the result afterwards.
            kwargs.pop("dl_device", None)
            dlpack = ext_tensor.__dlpack__(**kwargs)

        tensor = torch._C._from_dlpack(dlpack)

        if requested_copy and not producer_handled_copy and not cross_device_transfer:
            tensor = tensor.clone()

        if cross_device_transfer:
            tensor = tensor.to(device)

        return tensor
    else:
        if device is not None or copy is not None:
            raise ValueError(
                "device and copy kwargs not supported when ext_tensor is "
                "already a DLPack capsule."
            )
        dlpack = ext_tensor
        return torch._C._from_dlpack(dlpack)
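

# Illustrative self-check (a usage sketch, not required by the library): running
# this module directly exercises both interop paths handled by ``from_dlpack``.
if __name__ == "__main__":
    src = torch.arange(4)

    # ``__dlpack_device__`` reports ``(device_type, device_id)``; the type codes
    # match the ``DLDeviceType`` values defined at the top of this module.
    dev_type, _dev_id = src.__dlpack_device__()
    assert dev_type == DLDeviceType.kDLCPU

    # Producer path: ``src`` exposes ``__dlpack__``, so ``from_dlpack`` negotiates
    # keywords with it directly and the result shares memory with ``src``.
    shared = from_dlpack(src)
    shared[0] = -1
    assert src[0].item() == -1

    # Legacy path: an opaque capsule from ``to_dlpack``; consume it only once.
    capsule = to_dlpack(src)
    legacy = from_dlpack(capsule)
    assert legacy.data_ptr() == src.data_ptr()

    # Requesting an explicit copy detaches the result from the producer's memory.
    detached = from_dlpack(src, copy=True)
    detached[0] = 42
    assert src[0].item() == -1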