import re

import torch
import torch.utils._pytree as pytree
from torch.testing._utils import wrapper_set_seed
from functorch.compile import compiled_function, min_cut_rematerialization_partition, nop

from .make_fx import randomize


class assert_raises_regex:
    """Context manager asserting that the wrapped block raises
    ``exception_cls`` with a message matching ``regex``."""

    def __init__(self, exception_cls, regex):
        self.exception_cls = exception_cls
        self.regex = regex

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, traceback):
        if exc_type == self.exception_cls:
            msg = str(exc_val)
            if not re.search(self.regex, msg):
                raise AssertionError(
                    f"Expected exception to match regex. "
                    f"regex: {self.regex}, exception: {msg}")
            # Suppress the expected exception.
            return True
        if exc_type is not None:
            raise AssertionError(
                f"Expected {self.exception_cls} to be raised, "
                f"instead got exception {exc_type}")
        raise AssertionError("Expected exception to be raised but none was")


def aot_autograd_check(
        func,
        args,
        kwargs,
        dynamic,
        assert_raises_regex_fn=assert_raises_regex,
        assert_equals_fn=torch.testing.assert_close,
        check_gradients=True,
        try_check_data_specialization=False,
        skip_correctness_check=False):
    """Compares func(*args, **kwargs) in eager-mode to under AOTAutograd.

    Compares outputs and (if check_gradients=True) gradients produced by
    AOTAutograd against eager-mode PyTorch.

    We assume that func(*args, **kwargs) succeeds in eager-mode PyTorch.

    """
    flat_args, args_spec = pytree.tree_flatten((args, kwargs))
    args = [arg for arg in flat_args if isinstance(arg, torch.Tensor)]

    # We construct a new function that only accepts Tensors as inputs:
    # AOTAutograd traces over the flat list of Tensor arguments, while the
    # non-Tensor leaves are closed over and re-inserted here.
    def func_no_tensors(args):
        reconstructed_flat_args = []
        args = iter(args)
        for v in flat_args:
            if isinstance(v, torch.Tensor):
                reconstructed_flat_args.append(next(args))
            else:
                reconstructed_flat_args.append(v)

        c_args, c_kwargs = pytree.tree_unflatten(reconstructed_flat_args, args_spec)
        return func(*c_args, **c_kwargs)

    compiled_f = compiled_function(
        func_no_tensors, nop, nop, dynamic=dynamic,
        partition_fn=min_cut_rematerialization_partition)

    out = wrapper_set_seed(func_no_tensors, args)
    if check_gradients == "auto":
        any_tensor_requires_grad = pytree.tree_any_only(
            torch.Tensor, lambda x: x.requires_grad, args)
        any_output_requires_grad = pytree.tree_any_only(
            torch.Tensor, lambda x: x.requires_grad, out)
        check_gradients = any_tensor_requires_grad and any_output_requires_grad
    if not check_gradients:
        compiled_out = wrapper_set_seed(compiled_f, args)
        if skip_correctness_check:
            return
        assert_equals_fn(compiled_out, out, msg=outputs_msg)
        return
    _test_aot_autograd_forwards_backwards_helper(
        func_no_tensors, compiled_f, args, assert_raises_regex_fn,
        assert_equals_fn, try_check_data_specialization, skip_correctness_check)


outputs_msg = (
    "Outputs of the operator are different in eager-mode PyTorch vs "
    "AOTDispatcher tracing. This means the operator will have incorrect output "
    "underneath torch.compile. This could be because the operator's "
    "implementation is not traceable."
)


def _test_aot_autograd_forwards_backwards_helper(
        f, compiled_f, args, assert_raises_regex_fn, assert_equals_fn,
        try_check_data_specialization, skip_correctness_check=False):
    # Verify grads are equal between compiled and non-compiled versions of f.

    def call_forwards_backwards(f, args):
        flat_args = pytree.arg_tree_leaves(*args)
        diff_args = [arg for arg in flat_args
                     if isinstance(arg, torch.Tensor) and arg.requires_grad]
        out = wrapper_set_seed(f, args)
        flat_out = pytree.tree_leaves(out)

        sm = 0
        for i in flat_out:
            if isinstance(i, torch.Tensor):
                # Reduce with .sum().abs() so the result is a real scalar even
                # when the operator returns complex Tensors; autograd.grad
                # needs a real scalar output (or explicit grad_outputs).
                sm += i.sum().abs()
        assert isinstance(sm, torch.Tensor)
        return out, torch.autograd.grad(sm, diff_args, allow_unused=True)

    def check(args, ignore_failure=False):
        try:
            orig_out, orig_grad = call_forwards_backwards(f, args)
        except Exception:
            if ignore_failure:
                return
            raise

        # If eager mode produced no gradients but some inputs are non-leaf
        # Tensors, the compiled version is expected to raise rather than
        # silently return None gradients.
        tensor_args = [x for x in pytree.tree_flatten(args)[0]
                       if isinstance(x, torch.Tensor)]
        any_non_leaves = any(x.grad_fn is not None for x in tensor_args)
        if all(x is None for x in orig_grad) and any_non_leaves:
            with assert_raises_regex_fn(
                    RuntimeError,
                    'does not require grad and does not have a grad_fn'):
                call_forwards_backwards(compiled_f, args)
            return

        msg = (
            "Gradients of the operator are different in eager-mode PyTorch vs "
            "AOTDispatcher. This means the operator will have incorrect gradients "
            "underneath torch.compile. This could be because the operator's "
            "backward is incorrectly registered or not traceable."
        )

        compiled_out, compiled_grad = call_forwards_backwards(compiled_f, args)
        if not skip_correctness_check:
            try:
                assert_equals_fn(compiled_out, orig_out, msg=outputs_msg)
            except Exception as e:
                raise type(e)(outputs_msg) from e
            try:
                assert_equals_fn(compiled_grad, orig_grad, msg=msg)
            except Exception as e:
                raise type(e)(msg) from e

    check(args, ignore_failure=False)

    # Randomize the data and run the traced graph with it, to catch bugs
    # where Tensor data may have been baked into the trace. This is not
    # guaranteed to succeed, because `f` might have preconditions on the
    # values of its inputs, so failures here are ignored.
    if try_check_data_specialization:
        args = randomize(args)
        check(args, ignore_failure=True)