
import torch
from torch import Tensor

from .optimizer import (
    Optimizer,
    _use_grad_for_differentiable,
    _get_value,
    _stack_if_compiling,
    _default_to_fused_or_foreach,
    _differentiable_doc,
    _maximize_doc,
    _foreach_doc,
    _view_as_real,
)
from typing import List, Optional

__all__ = ["Adamax", "adamax"]


class Adamax(Optimizer):
    def __init__(
        self,
        params,
        lr=2e-3,
        betas=(0.9, 0.999),
        eps=1e-8,
        weight_decay=0,
        foreach: Optional[bool] = None,
        *,
        maximize: bool = False,
        differentiable: bool = False,
    ):
        if not 0.0 <= lr:
            raise ValueError(f"Invalid learning rate: {lr}")
        if not 0.0 <= eps:
            raise ValueError(f"Invalid epsilon value: {eps}")
        if not 0.0 <= betas[0] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 0: {betas[0]}")
        if not 0.0 <= betas[1] < 1.0:
            raise ValueError(f"Invalid beta parameter at index 1: {betas[1]}")
        if not 0.0 <= weight_decay:
            raise ValueError(f"Invalid weight_decay value: {weight_decay}")

        defaults = dict(
            lr=lr,
            betas=betas,
            eps=eps,
            weight_decay=weight_decay,
            foreach=foreach,
            maximize=maximize,
            differentiable=differentiable,
        )
        super().__init__(params, defaults)

    def __setstate__(self, state):
        super().__setstate__(state)
        for group in self.param_groups:
            group.setdefault("foreach", None)
            group.setdefault("maximize", False)
            group.setdefault("differentiable", False)
        state_values = list(self.state.values())
        step_is_tensor = (len(state_values) != 0) and torch.is_tensor(
            state_values[0]["step"]
        )
        if not step_is_tensor:
            for s in state_values:
                s["step"] = torch.tensor(float(s["step"]), dtype=torch.float32)

    def _init_group(self, group, params_with_grad, grads, exp_avgs, exp_infs, state_steps):
        has_complex = False
        for p in group["params"]:
            if p.grad is None:
                continue
            has_complex |= torch.is_complex(p)
            params_with_grad.append(p)
            if p.grad.is_sparse:
                raise RuntimeError("Adamax does not support sparse gradients")
            grads.append(p.grad)

            state = self.state[p]

            # Lazy state initialization: step counter, first-moment estimate,
            # and exponentially weighted infinity norm.
            if len(state) == 0:
                state["step"] = torch.tensor(0.0, dtype=torch.float32)
                state["exp_avg"] = torch.zeros_like(p, memory_format=torch.preserve_format)
                state["exp_inf"] = torch.zeros_like(p, memory_format=torch.preserve_format)

            exp_avgs.append(state["exp_avg"])
            exp_infs.append(state["exp_inf"])
            state_steps.append(state["step"])
        return has_complex

    @_use_grad_for_differentiable
    def step(self, closure=None):
        """Perform a single optimization step.

        Args:
            closure (Callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()

        for group in self.param_groups:
            params_with_grad = []
            grads = []
            exp_avgs = []
            exp_infs = []
            state_steps = []

            beta1, beta2 = group["betas"]
            eps = group["eps"]
            lr = group["lr"]
            weight_decay = group["weight_decay"]
            foreach = group["foreach"]
            maximize = group["maximize"]
            differentiable = group["differentiable"]

            has_complex = self._init_group(
                group, params_with_grad, grads, exp_avgs, exp_infs, state_steps
            )

            adamax(
                params_with_grad,
                grads,
                exp_avgs,
                exp_infs,
                state_steps,
                eps=eps,
                beta1=beta1,
                beta2=beta2,
                lr=lr,
                weight_decay=weight_decay,
                foreach=foreach,
                maximize=maximize,
                differentiable=differentiable,
                has_complex=has_complex,
            )

        return loss


Adamax.__doc__ = r"""Implements Adamax algorithm (a variant of Adam based on infinity norm).

    .. math::
       \begin{aligned}
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{input}      : \gamma \text{ (lr)}, \beta_1, \beta_2
                \text{ (betas)},\theta_0 \text{ (params)},f(\theta) \text{ (objective)},
                \: \lambda \text{ (weight decay)},                                                \\
            &\hspace{13mm}    \epsilon \text{ (epsilon)}                                          \\
            &\textbf{initialize} :  m_0 \leftarrow 0 \text{ ( first moment)},
                u_0 \leftarrow 0 \text{ ( infinity norm)}                                 \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                                 \\
            &\textbf{for} \: t=1 \: \textbf{to} \: \ldots \: \textbf{do}                         \\
            &\hspace{5mm}g_t           \leftarrow   \nabla_{\theta} f_t (\theta_{t-1})           \\
            &\hspace{5mm}if \: \lambda \neq 0                                                    \\
            &\hspace{10mm} g_t \leftarrow g_t + \lambda  \theta_{t-1}                            \\
            &\hspace{5mm}m_t      \leftarrow   \beta_1 m_{t-1} + (1 - \beta_1) g_t               \\
            &\hspace{5mm}u_t      \leftarrow   \mathrm{max}(\beta_2 u_{t-1}, |g_{t}|+\epsilon)   \\
            &\hspace{5mm}\theta_t \leftarrow \theta_{t-1} - \frac{\gamma m_t}{(1-\beta^t_1) u_t} \\
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
            &\bf{return} \:  \theta_t                                                     \\[-1.ex]
            &\rule{110mm}{0.4pt}                                                          \\[-1.ex]
       \end{aligned}

    For further details regarding the algorithm we refer to `Adam: A Method for Stochastic Optimization`_.
    """ + fr"""
    Args:
        params (iterable): iterable of parameters to optimize or dicts defining
            parameter groups
        lr (float, optional): learning rate (default: 2e-3)
        betas (Tuple[float, float], optional): coefficients used for computing
            the running average of the gradient and the exponentially
            weighted infinity norm (default: (0.9, 0.999))
        eps (float, optional): term added to the denominator to improve
            numerical stability (default: 1e-8)
        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
        {_foreach_doc}
        {_maximize_doc}
        {_differentiable_doc}

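    Example (a minimal usage sketch; ``model``, ``loss_fn``, ``input`` and
    ``target`` below are illustrative placeholders, not part of this module)::

        >>> optimizer = torch.optim.Adamax(model.parameters(), lr=2e-3)
        >>> optimizer.zero_grad()
        >>> loss_fn(model(input), target).backward()
        >>> optimizer.step()
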
    .. _Adam\: A Method for Stochastic Optimization:
        https://arxiv.org/abs/1412.6980

    """


def adamax(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_infs: List[Tensor],
    state_steps: List[Tensor],
    # NOTE: kw-only args with defaults are not supported by functions compiled
    # with torchscript, so these stay positional-with-defaults for now.
    foreach: Optional[bool] = None,
    maximize: bool = False,
    differentiable: bool = False,
    has_complex: bool = False,
    *,
    eps: float,
    beta1: float,
    beta2: float,
    lr: float,
    weight_decay: float,
):
    r"""Functional API that performs adamax algorithm computation.

    See :class:`~torch.optim.Adamax` for details.
    """

    if not all(isinstance(t, torch.Tensor) for t in state_steps):
        raise RuntimeError(
            "API has changed, `state_steps` argument must contain a list of singleton tensors"
        )

    if foreach is None:
        _, foreach = _default_to_fused_or_foreach(params, differentiable, use_fused=False)

    if foreach and torch.jit.is_scripting():
        raise RuntimeError("torch.jit.script not supported with foreach optimizers")

    if foreach and not torch.jit.is_scripting():
        func = _multi_tensor_adamax
    else:
        func = _single_tensor_adamax

    func(
        params,
        grads,
        exp_avgs,
        exp_infs,
        state_steps,
        eps=eps,
        beta1=beta1,
        beta2=beta2,
        lr=lr,
        weight_decay=weight_decay,
        maximize=maximize,
        differentiable=differentiable,
        has_complex=has_complex,
    )


def _single_tensor_adamax(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_infs: List[Tensor],
    state_steps: List[Tensor],
    *,
    eps: float,
    beta1: float,
    beta2: float,
    lr: float,
    weight_decay: float,
    maximize: bool,
    differentiable: bool,
    has_complex: bool,
):
    for i, param in enumerate(params):
        grad = grads[i]
        grad = grad if not maximize else -grad
        exp_avg = exp_avgs[i]
        exp_inf = exp_infs[i]
        step_t = state_steps[i]

        # update step
        step_t += 1

        if weight_decay != 0:
            grad = grad.add(param, alpha=weight_decay)

        if torch.is_complex(param):
            param = torch.view_as_real(param)
            grad = torch.view_as_real(grad)
            exp_avg = torch.view_as_real(exp_avg)
            exp_inf = torch.view_as_real(exp_inf)

        # Update biased first moment estimate.
        exp_avg.lerp_(grad, 1 - beta1)
        # Update the exponentially weighted infinity norm:
        # u_t = max(beta2 * u_{t-1}, |g_t| + eps)
        norm_buf = torch.cat(
            [exp_inf.mul_(beta2).unsqueeze(0), grad.abs().add_(eps).unsqueeze_(0)], 0
        )
        if not differentiable:
            torch.amax(norm_buf, 0, keepdim=False, out=exp_inf)
        else:
            exp_inf.copy_(torch.amax(norm_buf, 0, keepdim=False))

        bias_correction = 1 - beta1 ** _get_value(step_t)
        clr = lr / bias_correction

        param.addcdiv_(exp_avg, exp_inf, value=-clr)


def _multi_tensor_adamax(
    params: List[Tensor],
    grads: List[Tensor],
    exp_avgs: List[Tensor],
    exp_infs: List[Tensor],
    state_steps: List[Tensor],
    *,
    eps: float,
    beta1: float,
    beta2: float,
    lr: float,
    weight_decay: float,
    maximize: bool,
    differentiable: bool,
    has_complex: bool,
):
    assert not differentiable, "_foreach ops don't support autograd"

    if len(params) == 0:
        return

    grouped_tensors = Optimizer._group_tensors_by_device_and_dtype(
        [params, grads, exp_avgs, exp_infs, state_steps]
    )
    for (
        grouped_params,
        grouped_grads,
        grouped_exp_avgs,
        grouped_exp_infs,
        grouped_state_steps,
    ), _ in grouped_tensors.values():
        if maximize:
            grouped_grads = torch._foreach_neg(grouped_grads)

        if has_complex:
            _view_as_real(grouped_params, grouped_grads, grouped_exp_avgs, grouped_exp_infs)

        # Update steps. If steps are on CPU, foreach falls back to the slow path,
        # which is a for-loop calling t.add(1) over and over; wrapping 1 into a
        # Tensor once here keeps us on the fast fused path.
        if grouped_state_steps[0].is_cpu:
            torch._foreach_add_(grouped_state_steps, torch.tensor(1.0, device="cpu"), alpha=1.0)
        else:
            torch._foreach_add_(grouped_state_steps, 1)

        if weight_decay != 0:
            if maximize:
                # Re-use the intermediate memory (grouped_grads) already allocated for maximize
                torch._foreach_add_(grouped_grads, grouped_params, alpha=weight_decay)
            else:
                grouped_grads = torch._foreach_add(grouped_grads, grouped_params, alpha=weight_decay)

        # Update biased first moment estimate.
        torch._foreach_lerp_(grouped_exp_avgs, grouped_grads, 1 - beta1)

        # Update the exponentially weighted infinity norm.
        torch._foreach_mul_(grouped_exp_infs, beta2)

        for exp_inf, grad in zip(grouped_exp_infs, grouped_grads):
            norm_buf = torch.cat(
                [exp_inf.unsqueeze(0), grad.abs().add_(eps).unsqueeze_(0)], 0
            )
            # torch.max along a dim returns (values, indices); the indices output
            # is dumped into a throwaway tensor.
            torch.max(norm_buf, 0, False, out=(exp_inf, exp_inf.new().long()))

        bias_corrections = [1 - beta1 ** _get_value(step) for step in grouped_state_steps]
        clr = _stack_if_compiling([-1 * (lr / bias_correction) for bias_correction in bias_corrections])

        torch._foreach_addcdiv_(grouped_params, grouped_exp_avgs, grouped_exp_infs, clr)
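# The Adamax class above is the normal entry point; the functional `adamax` can
# also be driven directly. A minimal sketch (every tensor below is an
# illustrative placeholder, not part of this module):
#
#     param = torch.zeros(3)
#     grad = torch.full((3,), 0.1)
#     exp_avg, exp_inf = torch.zeros(3), torch.zeros(3)
#     step = torch.tensor(0.0)
#     adamax([param], [grad], [exp_avg], [exp_inf], [step],
#            eps=1e-8, beta1=0.9, beta2=0.999, lr=2e-3, weight_decay=0.0)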