
from typing import Any, Callable

import torch
import torch.utils._pytree as pytree
from torch._ops import HigherOrderOperator


def autograd_not_implemented_inner(
    operator: HigherOrderOperator, delayed_error: bool, *args: Any, **kwargs: Any
) -> Any:
    """If autograd is enabled and any of the arguments require grad this will either
    raise an error or return a DelayedError depending on the value of delayed_error.

    Args:
        operator: The HigherOrderOperator to call with the *args and **kwargs
        delayed_error: If True, return a DelayedError instead of raising an error
        args: The flattened operands to the HigherOrderOperator
        kwargs: The keyword arguments to the HigherOrderOperator

    Raises:
        RuntimeError: If autograd is enabled, any of the arguments to the
            HigherOrderOperator require grad, and delayed_error is False
    c              3   j   K   | ]+  }t        |t        j                        s|j                   - y wN)
isinstancetorchTensorrequires_grad).0fs     hC:\Users\daisl\Desktop\realtime-object-detection\venv\Lib\site-packages\torch/_higher_order_ops/utils.py	<genexpr>z1autograd_not_implemented_inner.<locals>.<genexpr>   s%      +
%2jELL6QAOO]s   33zAutograd not implemented for    c                     t        j                  |       st        j                  |       r| j                         } d| _        | S )NT)r   is_floating_point
is_complexdetachr   )tensors    r   fake_requires_gradz:autograd_not_implemented_inner.<locals>.fake_requires_grad$   s5    ..v6%:J:J6:R!'/3,!M    c                        |             S r    )xerr_fnr   s    r   <lambda>z0autograd_not_implemented_inner.<locals>.<lambda>+   s    F3Ea3H,Ir   N)r   _C_AutoDispatchBelowAutogradpytreearg_tree_leavesis_grad_enabledany
_functionsDelayedErrorstrtree_map_onlyr   RuntimeError)r   r   r   r	   resultflat_operandsr!   r   s         @@r   autograd_not_implemented_innerr0      s      
	,	,	.4*6*..5  "s +
%2+
 (
 ,,993CM?C
" ++LL"I6% 
/	., #%B3x=/#RSS/ 
/	.	.s   B$C1C11C:opdeferred_errorc                       fd}|S )Nc                  $    t        g| i |S r   )r0   )r   r	   r2   r1   s     r   innerz'autograd_not_implemented.<locals>.inner3   s    -b.R4R6RRr   r   )r1   r2   r5   s   `` r   autograd_not_implementedr6   2   s    S Lr   )typingr   r   r   torch.utils._pytreeutils_pytreer%   
torch._opsr   boolr0   r6   r   r   r   <module>r=      s]       $ $ *'!'26'?B'NQ''T!4 d x r   
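
# Usage sketch (illustrative only, not part of the upstream module): the wrapper
# returned by `autograd_not_implemented` is typically registered as a
# HigherOrderOperator's Autograd-key implementation. The operator name "my_hop"
# below is a hypothetical placeholder.
#
#     from torch._C import DispatchKey
#     from torch._ops import HigherOrderOperator
#     from torch._higher_order_ops.utils import autograd_not_implemented
#
#     my_hop = HigherOrderOperator("my_hop")
#
#     # With deferred_error=True the call succeeds, but its tensor outputs carry
#     # a DelayedError that fires only if backward() reaches them; with
#     # deferred_error=False a RuntimeError is raised up front instead.
#     my_hop.py_impl(DispatchKey.Autograd)(
#         autograd_not_implemented(my_hop, deferred_error=True)
#     )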