
import torch
import torch.utils._pytree as pytree
from torch.testing._internal.common_methods_invocations import wrapper_set_seed
from functorch.compile import compiled_function, min_cut_rematerialization_partition, nop
from .make_fx import randomize
import re


class assert_raises_regex:
    # Small context manager used as the default `assert_raises_regex_fn`:
    # asserts that `exception_cls` is raised and that its message matches `regex`.
    def __init__(self, exception_cls, regex):
        self.exception_cls = exception_cls
        self.regex = regex

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, traceback):
        if exc_type == self.exception_cls:
            msg = str(exc_val)
            if not re.search(self.regex, msg):
                raise AssertionError(
                    f"Expected exception to match regex. regex: {self.regex}, exception: {msg}")
            # Suppress the expected exception.
            return True
        if exc_type is not None:
            raise AssertionError(
                f"Expected {self.exception_cls} to be raised, instead got exception {exc_type}")
        raise AssertionError("Expected exception to be raised but none was")


def aot_autograd_check(
        func,
        args,
        kwargs,
        dynamic,
        assert_raises_regex_fn=assert_raises_regex,
        assert_equals_fn=torch.testing._comparison.assert_close,
        check_gradients=True,
        try_check_data_specialization=False):
    """Compares func(*args, **kwargs) in eager-mode PyTorch to running it under AOTAutograd.

    Compares outputs and (if check_gradients=True) gradients produced by
    AOTAutograd against eager-mode PyTorch.

    We assume that func(*args, **kwargs) succeeds in eager-mode PyTorch.
    """
    flat_args, args_spec = pytree.tree_flatten((args, kwargs))
    args_is_tensor = [isinstance(arg, torch.Tensor) for arg in flat_args]
    args = [arg for arg in flat_args if isinstance(arg, torch.Tensor)]

    # Construct a new function that accepts only Tensors as inputs; the
    # non-Tensor leaves of (args, kwargs) are closed over and spliced back in.
    def func_no_tensors(args):
        reconstructed_flat_args = []
        args = iter(args)
        for v in flat_args:
            if isinstance(v, torch.Tensor):
                reconstructed_flat_args.append(next(args))
            else:
                reconstructed_flat_args.append(v)

        c_args, c_kwargs = pytree.tree_unflatten(reconstructed_flat_args, args_spec)
        return func(*c_args, **c_kwargs)

    compiled_f = compiled_function(
        func_no_tensors, nop, nop, dynamic=dynamic,
        partition_fn=min_cut_rematerialization_partition)

    out = wrapper_set_seed(func_no_tensors, args)
    if check_gradients == "auto":
        any_tensor_requires_grad = pytree.tree_any_only(torch.Tensor, lambda x: x.requires_grad, args)
        any_output_requires_grad = pytree.tree_any_only(torch.Tensor, lambda x: x.requires_grad, out)
        check_gradients = any_tensor_requires_grad and any_output_requires_grad
    if not check_gradients:
        compiled_out = wrapper_set_seed(compiled_f, args)
        assert_equals_fn(compiled_out, out, msg=outputs_msg)
        return
    _test_aot_autograd_forwards_backwards_helper(
        func_no_tensors, compiled_f, args, assert_raises_regex_fn, assert_equals_fn,
        try_check_data_specialization)


outputs_msg = (
    "Outputs of the operator are different in eager-mode PyTorch vs "
    "AOTAutograd. This means the operator will have incorrect output "
    "underneath torch.compile. This could be because the operator's "
    "implementation is not traceable, or there is a bug in AOTAutograd."
)


def _test_aot_autograd_forwards_backwards_helper(
        f, compiled_f, args, assert_raises_regex_fn, assert_equals_fn,
        try_check_data_specialization):
    # Verify that outputs and gradients match between the compiled and
    # non-compiled versions of f.

    def call_forwards_backwards(f, args):
        flat_args = pytree.arg_tree_leaves(*args)
        diff_args = [arg for arg in flat_args
                     if isinstance(arg, torch.Tensor) and arg.requires_grad]
        out = wrapper_set_seed(f, args)
        flat_out = pytree.tree_leaves(out)

        sm = 0
        for i in flat_out:
            if isinstance(i, torch.Tensor):
                # .sum().abs() reduces every Tensor output to a real scalar we
                # can backprop through, even if the output is complex.
                sm += i.sum().abs()
        assert isinstance(sm, torch.Tensor)
        return out, torch.autograd.grad(sm, diff_args, allow_unused=True)

    def check(args, ignore_failure=False):
        try:
            orig_out, orig_grad = call_forwards_backwards(f, args)
        except Exception:
            if ignore_failure:
                return
            raise

        if all(x is None for x in orig_grad):
            with assert_raises_regex_fn(RuntimeError, 'does not require grad and does not have a grad_fn'):
                call_forwards_backwards(compiled_f, args)
            return

        msg = (
            "Gradients of the operator are different in eager-mode PyTorch vs "
            "AOTAutograd. This means the operator will have incorrect gradients "
            "underneath torch.compile. This could be because the operator's "
            "backward is incorrectly registered or not traceable, or there is "
            "a bug in AOTAutograd."
        )
        compiled_out, compiled_grad = call_forwards_backwards(compiled_f, args)
        assert_equals_fn(compiled_out, orig_out, msg=outputs_msg)
        assert_equals_fn(compiled_grad, orig_grad, msg=msg)

    check(args, ignore_failure=False)

    # Randomize the data and re-run with it, to catch cases where Tensor data
    # was baked into the trace. This is not guaranteed to succeed (f may have
    # preconditions on the Tensor values), so failures here are ignored.
    if try_check_data_specialization:
        args = randomize(args)
        check(args, ignore_failure=True)