
import dataclasses
import inspect
import sys
from collections.abc import Callable, Iterable, Iterator
from typing import Any, Literal, Optional, overload, Union

import torch
import torch.utils._pytree as pytree
import torchgen
from torch import _C, _utils_internal
from torch._ops import OpOverload


@dataclasses.dataclass
class Kernel:
    """Models a (function, source location)"""

    func: Callable
    source: str

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)


class RegistrationHandle:
    """Does something when someone calls .destroy() on it"""

    def __init__(self, on_destroy: Callable):
        self._on_destroy = on_destroy

    def destroy(self) -> None:
        self._on_destroy()
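

# Illustrative example (comment only; `my_sin` and the on_destroy callback are
# hypothetical): registration code typically pairs these two helpers -- a Kernel
# records what to run and where it was registered from, and a RegistrationHandle
# lets the caller undo the registration later.
#
#   kernel = Kernel(func=my_sin, source="my_project/ops.py:12")
#   out = kernel(torch.ones(3))                       # forwards args to my_sin
#   handle = RegistrationHandle(on_destroy=lambda: print("deregistered"))
#   handle.destroy()                                  # runs the callback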


def get_source(stacklevel: int) -> str:
    """Get a string that represents the caller.

    Example: "/path/to/foo.py:42"

    Use stacklevel=1 to get the caller's source
    Use stacklevel=2 to get the caller's caller's source
    etc.
    """
    frame = inspect.getframeinfo(sys._getframe(stacklevel))
    source = f"{frame.filename}:{frame.lineno}"
    return source


def parse_namespace(qualname: str) -> tuple[str, str]:
    splits = qualname.split("::")
    if len(splits) != 2:
        raise ValueError(
            f'Expected `qualname` to be of the form "namespace::name", '
            f"but got {qualname}. The qualname passed to the torch.library "
            f"APIs must consist of a namespace and a name, e.g. aten::sin"
        )
    return splits[0], splits[1]


def lookup_op(qualname: str) -> OpOverload:
    namespace, name = parse_namespace(qualname)
    if "." in name:
        name, overload = name.split(".")
    else:
        overload = "default"
    ns = getattr(torch.ops, namespace)
    packet = getattr(ns, name)
    return getattr(packet, overload)


def is_builtin(op: OpOverload) -> bool:
    assert isinstance(op, OpOverload)
    return op.namespace in {"aten", "prim", "prims"}
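
# Illustrative example (comment only): `parse_namespace` splits a
# "namespace::name" qualname, and `lookup_op` resolves it (plus an optional
# ".overload" suffix) to an OpOverload.
#
#   parse_namespace("aten::sin")       # -> ("aten", "sin")
#   lookup_op("aten::sin")             # -> torch.ops.aten.sin.default
#   lookup_op("aten::add.Tensor")      # -> torch.ops.aten.add.Tensor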
              r|j                  |       } t        | |      sJ  ||       S )af  Check if the schema is functional.

    An operator is functional if:
    - it does not mutate any of its inputs
    - If no view are allowed
        - it does not return a view on any of its inputs
    - If valid views are allowed
        - it is not a view or a view with a single input Tensor and single output Tensor
    - it has at least one return
    c                    | j                   ry| j                  }t        |      dkD  xr t        d |D              }d}d}t	        | t
        j                        rw| j                  D ],  }t	        |j                  t
        j                        s(|dz  }. | j                  D ],  }t	        |j                  t
        j                        s(|dz  }. nt	        | t        j                  j                        rl| j                  j                  D ]"  }|j                  j                         s|dz  }$ | j                  D ]"  }|j                  j                         s|dz  }$ |r	xr |dk(  xr |dk(  S | j                  syy)NFr   c              3   j   K   | ]+  }|j                   d uxr |j                   j                    - y wr   )
alias_infois_write).0rs     r   	<genexpr>z>is_functional_schema.<locals>.is_functional.<locals>.<genexpr>^   s3      5
GHALL$BQ\\-B-B)BB5
s   13r9   T)
is_mutablereturnsr;   anyrO   rC   FunctionSchema	argumentstype
TensorTypetorchgenmodelflat_non_outis_tensor_like)
rR   retsis_non_mutating_viewnum_tensor_inputsnum_tensor_outputsargretargumentret_argrQ   s
            r   is_functionalz+is_functional_schema.<locals>.is_functionalZ   su   ~~"4y1}  
 5
LP5
 2
 fe223'' +chh(8(89%*%+ ~~ ,chh(8(89&!+&,  = =>",,99 +==//1%*%+ ">> ,<<..0&!+&,  # !Q&B+=+B ~~r   r   )r^   )rO   rC   r   r^   torchgen.modelr   parse)rR   rQ   rn   r^   s    `  r   is_functional_schemarq   N   sd    "H &%((112V$$ .&#%%f-fn---  r   typc           	      F   | t        j                  t         j                  j                               k(  xs | t        j                  t        j                  t         j                  j                                     k(  xs | t        j                  t        j                  t         j                  j                                     k(  xsZ | t        j                  t        j                  t        j                  t         j                  j                                           k(  S r   )r   ListTypera   getOptionalTyperr   s    r   is_tensorlist_like_typerx      s    r{{2==,,.// 	U"++boobmm.?.?.ABCC	U"//"++bmm.?.?.A"BCC	U "//"++boobmm>O>O>Q.R"STT	r   c                     | t         j                  j                         k(  xs4 | t        j                  t         j                  j                               k(  S r   )r   ra   ru   rv   rw   s    r   is_tensor_like_typerz      s:    "--##%%T@Q@Q@S0T)TTr   c                    | j                   dk7  ry| j                  }t        |j                        dk7  ry|j                  d   j                  y|j                  d   j                  j
                  }t        |      dk7  ryt        t        |            }t        |j                        dk  ry|j                  d   }|j                  y|j                  j                  sy|j                  j
                  }t        |      dk7  ry|t        t        |            k7  ry|j                  dd D ]  }|j                   y y)aN  Check if an op is an inplace aten op, i.e. it mutates and returns the first arg.

    TODO: torchgen/model.py's FunctionSchema.parse is the source of truth for this,
    but not all PyTorch builds have torchgen (due to the yaml dependency being weird).
    Figure this out.

    Example: add_(Tensor(a!) x, Tensor y) -> Tensor(a)
    rL   Fr9   r   NT)
rE   _schemar;   r\   rV   	after_setnextiterr_   rW   )rJ   rR   	alias_setloc	first_argrj   s         r   mutates_and_returns_first_argr      s(    
||vZZF
6>>a~~a##+q!,,66I
9~
tI
C
6q   #I#(($$..I
9~
d4	?### >>% r   c                    g }i }t        t        | j                              D ]  }| j                  |   }|j                  rE|j                  |v r||j                     ||j                  <   I|j
                  ||j                  <   c|t        |      k  r|j                  ||          |j                  |j
                          t        |      |fS r   )ranger;   r_   
kwarg_onlyrF   default_valueappendtuple)rR   r   r   new_args
new_kwargsiinfos          r   fill_defaultsr      s    HJ3v''() 4"??yyF"(.tyy(9
499%(,(:(:
499%3t9}Q( 2 234 ?J&&r   r   .r   c              #     K   t        | j                        t        |      t        |      z   k\  sJ t        t        | j                              D ]  }| j                  |   }|j                  r"|j                  |v r|||j                     f @|t        |      k\  r.|j                  s!|j                  |v r|||j                     f ||||   f  yw)zzips schema.arguments and (args, kwargs) together.

    Assumes that (args, kwargs) were the inputs to some torch._ops.OpOverload:
    that is, (args, kwargs) must be bindable to the schema (args, kwargs).
    N)r;   r_   r   r   rF   )rR   r   r   r   r   s        r   
zip_schemar      s      v CIF$;;;;3v''() "??yyF"F499---D	>??tyyF':F499--- DGm s   CCc           	      \   ddl m} | j                  }t        |t        j
                  j                        st        d      d }g }| j                  D ]  }t        |t        j                  j                  t        j                  j                  j                  f      r|j                   ||             ct        |t        j                  j                  j                  t        t         f      r&|j                  |D cg c]
  } ||       c}       t        dt#        |               t%        j&                  |j(                        j*                  | } ||       }|j-                  |j.                  t!        |j0                  j3                               t        |      f      S c c}w )Nr   )FunctionSchemaGenzfx_node's target must be a hop.c                     | j                   j                  dd       }|;| j                  dk(  sJ t        | j                  j
                  | j                        }|S )Nvalget_attr)metaru   rJ   rB   graphowning_moduletarget)nodemeta_vals     r   _collect_example_valz5hop_schema_from_fx_node.<locals>._collect_example_val   sM    99==-77j(((tzz77EHr   zUnsupported arg type )torchgen.gen_schema_utilsr   r   rO   rC   _opsHigherOrderOperatorRuntimeErrorr   fxNoder   r   immutable_collectionsimmutable_listlistr   r`   r.   	signaturer   bindfrom_example_namer_   items)	r   r   hopr   example_inputsrj   x
bound_argsexample_outputs	            r   hop_schema_from_fx_noder      sW   ;
++Cc5::99:<== Nyy DcEHHMM588==+=+=>?!!"6s";<%((00??uM
 !!C"Hq#7#:"HI!6tCykBCCD *N):):3<<)H)M)M	*J *$/N))		5--3356n9M8O  #Is   >F)
c                     t        | t              sJ t        |       ry| j                  }|j                  syt        |j                        dkD  ryy)NFr   T)rO   r   rP   r|   r[   r;   r\   )rJ   rR   s     r   can_generate_trivial_fake_implr     sJ    b*%%%"~ ZZF
6>>Qr   c                  $    t        t        dd      S )zIf an op was defined in C++ and extended from Python using the
    torch.library APIs, returns if we require that there have been a
    m.set_python_module("mylib.ops") call from C++ that associates
    the C++ op with a python module.
    """
    return getattr(_utils_internal, "REQUIRES_SET_PYTHON_MODULE", True)


def handle_dispatch_mode(curr_mode, op_overload, *args, **kwargs):
    assert isinstance(curr_mode, torch.utils._python_dispatch.TorchDispatchMode)
    args_flattened, _ = torch.utils._pytree.tree_flatten((args, kwargs.values()))
    # Only include Tensors that carry the Python dispatch key in the "types"
    # argument to __torch_dispatch__.
    overload_types = [
        type(a)
        for a in args_flattened
        if isinstance(a, torch.Tensor)
        and torch._C._dispatch_keys(a).has(torch._C.DispatchKey.Python)
    ]
    return curr_mode.__torch_dispatch__(op_overload, overload_types, args, kwargs)


def has_kwarg_only_args(schema: _C.FunctionSchema):
    return any(a.kwarg_only for a in schema.arguments)


def has_kwarg_only_tensors(schema: _C.FunctionSchema):
    for a in schema.arguments:
        if not (is_tensor_like_type(a.type) or is_tensorlist_like_type(a.type)):
            continue
        if not a.kwarg_only:
            continue
        return True
    return False


def has_tensor_arg(schema: _C.FunctionSchema) -> bool:
    """
    Given a schema, returns True if the schema has a Tensor arg.
    A Tensor arg is any arg with a type annotation that might involve Tensor.
    """
    return any(
        is_tensor_like_type(a.type) or is_tensorlist_like_type(a.type)
        for a in schema.arguments
    )


def get_device_arg_index(schema: _C.FunctionSchema) -> Optional[int]:
    """
    Given a schema, returns the id of the `device: torch.device` argument.
    If it does not exist, returns None.
    """
    for index, arg in enumerate(schema.arguments):
        if arg.type is _C.DeviceObjType.get() and arg.name == "device":
            return index
    return None


def iter_tensors(
    args: tuple[Any], kwargs: dict[str, Any], allowed_nesting: int = 1
) -> Iterator[torch.Tensor]:
    def check(arg):
        if isinstance(arg, torch.Tensor):
            yield arg
        elif allowed_nesting > 0 and isinstance(arg, (tuple, list)):
            yield from iter_tensors(tuple(arg), {}, allowed_nesting - 1)

    for arg in args:
        yield from check(arg)
    for kwarg in kwargs.values():
        yield from check(kwarg)


def check_aliasing_constraint(name, prev, result, get_module=lambda: "???"):
    """
    custom operators' outputs must not alias any inputs or other outputs.
    """
    storages = {
        t.untyped_storage()._cdata for t in prev if isinstance(t, torch.Tensor)
    }
    tuple_result = result
    if not isinstance(result, tuple):
        tuple_result = (result,)
    for tensor in iter_tensors(tuple_result, {}):
        key = tensor.untyped_storage()._cdata
        if key in storages:
            raise RuntimeError(
                f"{name} (with implementation in {get_module()}): "
                f"The output of this custom operator (1) must not also be an input to "
                f"this custom operator and (2) may not alias any inputs to this custom "
                f"operator or other returns. The most common way to trigger this error "
                f"is if we have y = custom_op(x) and y and x are the same Tensor. "
                f"Please instead return a clone of the offending output tensor(s) "
                f"(e.g. return x.clone()) or refactor the custom operator to not "
                f"return y."
            )
        storages.add(key)


def _c_check_aliasing_constraint(name, args, kwargs, result, get_module=lambda: "???"):
    """
    custom operators' outputs must not have any aliases
    This version uses C++ implementation for perf.
    Only List container is supported.
    Tensors in Lists with not only Tensors are checked.
    """
    tuple_result = result
    if not isinstance(result, tuple):
        tuple_result = (result,)
    if _C._any_output_is_alias_to_input_or_output(args, kwargs, tuple_result):
        raise RuntimeError(
            f"{name} (with implementation in {get_module()}): "
            f"The output of this custom operator (1) must not also be an input to "
            f"this custom operator and (2) may not alias any inputs to this custom "
            f"operator or other returns. The most common way to trigger this error "
            f"is if we have y = custom_op(x) and y and x are the same Tensor. "
            f"Please instead return a clone of the offending output tensor(s) "
            f"(e.g. return x.clone()) or refactor the custom operator to not "
            f"return y."
        )


class MutationChecker:
    """
    Check if an operator mutated its arguments.
    Usage:

    checker = MutationChecker(op, flat_args, args_spec)
    op(*args, **kwargs)
    checker.check()
    """

    def __init__(self, op, flat_args, args_spec):
        self.op = op
        self.args_spec = args_spec
        self.flat_args = flat_args
        self.real_pre_hashes = [
            hash_tensor(a) if isinstance(a, torch.Tensor) else None for a in flat_args
        ]

    def check(self):
        real_post_hashes = [
            hash_tensor(a) if isinstance(a, torch.Tensor) else None
            for a in self.flat_args
        ]
        was_mutated = [
            not torch.equal(pre, post)
            and not (pre.isnan().all() and post.isnan().all())
            if isinstance(pre, torch.Tensor) and isinstance(post, torch.Tensor)
            else None
            for pre, post in zip(self.real_pre_hashes, real_post_hashes)
        ]
        was_mutated_args, was_mutated_kwargs = pytree.tree_unflatten(
            was_mutated, self.args_spec
        )
        for info, was_mutated in zip_schema(
            self.op._schema, was_mutated_args, was_mutated_kwargs
        ):

            def check_one(info, was_mutated):
                if info.is_mutable == was_mutated:
                    return
                raise RuntimeError(
                    f"{self.op._name}: for argument '{info.name}': the operator's schema "
                    f"{self.op._schema} specified that the operator "
                    f"{'mutates' if info.is_mutable else 'does not mutate'} "
                    f"the argument, but this seems to be empirically wrong. "
                    f"Please make the schema and operator behavior consistent. "
                    f"You can specify that an operator mutates a Tensor by "
                    f"e.g. changing its schema type from 'Tensor name' to 'Tensor(a!) name'"
                    f"(use different identifiers (a, b, c, ...) for different Tensors)"
                )

            if is_tensor_like_type(info.type):
                check_one(info, was_mutated)
            elif is_tensorlist_like_type(info.type):
                was_any_mutated = False if was_mutated is None else any(was_mutated)
                check_one(info, was_any_mutated)


def hash_tensor(t: torch.Tensor) -> torch.Tensor:
    """Some inexpensive hash. Used as a quick and dirty indicator for tensor mutation"""
    return t.detach().float().mean()


def has_fake_kernel(op: torch._ops.OpOverload) -> bool:
    """If an operator (that stays alive until FakeTensorMode) has a Fake kernel.
    Don't use this if the operator decomposes before FakeTensorMode.
    """
    if can_generate_trivial_fake_impl(op):
        return True
    name = op._name
    if torch._C._dispatch_has_kernel_for_dispatch_key(
        name, "CompositeImplicitAutograd"
    ):
        return True
    opdef = torch._library.custom_ops._maybe_get_opdef(name)
    if opdef is None:
        if torch._C._dispatch_has_kernel_for_dispatch_key(
            name, "CompositeExplicitAutograd"
        ):
            return True
        entry = torch._library.simple_registry.singleton.find(name)
        if entry.fake_impl.kernel is not None:
            return True
        if torch._C._dispatch_has_kernel_for_dispatch_key(name, "Meta"):
            return True
    else:
        if opdef._abstract_fn is not None:
            return True
    return False


def mutated_args_kwargs(schema: _C.FunctionSchema) -> tuple[list[int], list[str]]:
    idxs = []
    keys = []
    for i, info in enumerate(schema.arguments):
        if info.alias_info is not None and info.alias_info.is_write:
            if info.kwarg_only:
                keys.append(info.name)
            else:
                idxs.append(i)
    return idxs, keys


tags_by_priority = [
    _C.Tag.needs_exact_strides,
    _C.Tag.needs_contiguous_strides,
    _C.Tag.needs_fixed_stride_order,
    _C.Tag.flexible_layout,
]


@overload
def get_layout_constraint_tag(fn, *, with_default: Literal[True] = True) -> _C.Tag: ...


@overload
def get_layout_constraint_tag(
    fn, *, with_default: Literal[False]
) -> Optional[_C.Tag]: ...


def get_layout_constraint_tag(fn, *, with_default=True):
    for tag in tags_by_priority:
        if tag in fn.tags:
            return tag
    if with_default:
        if is_builtin(fn):
            return torch._C.Tag.flexible_layout

        from torch._functorch import config

        return getattr(torch._C.Tag, config.custom_op_default_layout_constraint)
    return None


_RANDOM_FUNCTIONS = {
    torch.rand,
    torch.randn,
    torch.randint,
    torch.randperm,
    torch.rand_like,
    torch.randn_like,
    torch.randint_like,
    torch.normal,
    torch.poisson,
    torch.bernoulli,
    torch.multinomial,
}


def is_impure(
    op: Callable,
    *,
    args: Optional[tuple[Any, ...]] = None,
    kwargs: Optional[dict[str, Any]] = None,
    impure_random: bool = True,
) -> bool:
    """
    An operator is impure if it:
    - Mutates its inputs (has a mutable schema)
    - Has nondeterministic/random behavior that mutates RNG state
    - Is explicitly marked as effectful via torch.library._register_effectful_op

    Args:
        op: The operator to check (function, OpOverload, HigherOrderOperator, etc.)
        args: Optional arguments that would be passed to the callable
        kwargs: Optional keyword arguments that would be passed to the callable
        impure_random: Whether to treat random operations as impure (default: True)

    Returns:
        bool: True if the callable has side effects, False otherwise
    """
    from torch._higher_order_ops.effects import _get_effect
    from torch.fx.node import _side_effectful_functions

    # Ops that mutate their inputs (i.e. have a mutable schema) are impure.
    schema = getattr(op, "_schema", None)
    if schema is not None and schema.is_mutable:
        return True

    # auto_functionalized HOPs wrap the op being functionalized as their first
    # argument; the wrapper is only impure if the wrapped op has side effects.
    if isinstance(op, torch._ops.HigherOrderOperator) and op in (
        torch.ops.higher_order.auto_functionalized,
        torch.ops.higher_order.auto_functionalized_v2,
    ):
        if args is not None and len(args) > 0:
            return args[0] in _side_effectful_functions
        return False

    # Random operations mutate the RNG state.
    if impure_random:
        if getattr(op, "_nondeterministic_seeded", False):
            return True
        if op in _RANDOM_FUNCTIONS:
            return True

    # Operations explicitly registered as side-effectful/effectful.
    if op in _side_effectful_functions:
        return True
    return _get_effect(op) is not None
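

# Illustrative example (comment only; outputs follow the documented semantics
# of is_impure above):
#
#   is_impure(torch.ops.aten.add_.Tensor)       # True: mutable schema
#   is_impure(torch.rand)                       # True: mutates RNG state
#   is_impure(torch.rand, impure_random=False)  # False: RNG effects ignored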
 8 8 : :  $ $  % ! * * * 3 3 	 c 	 eCHo 	 % %
 %5: 5$ 5
 CH 9! 9!4 9!D 9!z  US UT U#j #L'$%*38_>B38neBKK$%&4#Lz d  HD HS$7 1 1 72#4#4 2,,  !2!2 uS$Y7G  FG 
* "38n ?B ell  >K 4 IV 
271 71t%5<< %ELL %


-- $ <	 1 1 	eDItCy<P6Q 	 FFFF##FF##FF	  
.2%dmVV 
 
%enbff 

 37   
JJ	KK	MM	NN	OO			LL	MM	OO	 $ '+'+HH 5c?
#H T#s(^$	H
 H 
Hr   