
from keras.src import initializers
from keras.src import ops
from keras.src.api_export import keras_export
from keras.src.optimizers import optimizer


@keras_export(["keras.optimizers.Adagrad"])
class Adagrad(optimizer.Optimizer):
    """Optimizer that implements the Adagrad algorithm.

    Adagrad is an optimizer with parameter-specific learning rates,
    which are adapted relative to how frequently a parameter gets
    updated during training. The more updates a parameter receives,
    the smaller the updates.
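
    Roughly, each parameter follows the rule below (a pseudocode sketch of
    the algorithm, not the exact backend ops used in `update_step`):

        accumulator += gradient ** 2
        variable -= learning_rate * gradient / sqrt(accumulator + epsilon)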

    Args:
        learning_rate: A float, a
            `keras.optimizers.schedules.LearningRateSchedule` instance, or
            a callable that takes no arguments and returns the actual value to
            use. The learning rate. Defaults to `0.001`. Note that `Adagrad`
            tends to benefit from higher initial learning rate values compared
            to other optimizers. To match the exact form in the original paper,
            use `1.0`.
        initial_accumulator_value: Floating point value. Starting value for
            the per-parameter gradient-square accumulators. Must be
            non-negative.
        epsilon: Small floating point value for maintaining numerical stability.
        {{base_optimizer_keyword_args}}
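
    Example (a usage sketch; `model` stands for any compile-ready
    `keras.Model`):

    ```python
    import keras

    optimizer = keras.optimizers.Adagrad(learning_rate=0.01)
    model.compile(optimizer=optimizer, loss="mse")
    ```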

    Reference:

    - [Duchi et al., 2011](
        http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf).
    """

    def __init__(
        self,
        learning_rate=0.001,
        initial_accumulator_value=0.1,
        epsilon=1e-7,
        weight_decay=None,
        clipnorm=None,
        clipvalue=None,
        global_clipnorm=None,
        use_ema=False,
        ema_momentum=0.99,
        ema_overwrite_frequency=None,
        loss_scale_factor=None,
        gradient_accumulation_steps=None,
        name="adagrad",
        **kwargs,
    ):
        super().__init__(
            learning_rate=learning_rate,
            weight_decay=weight_decay,
            clipnorm=clipnorm,
            clipvalue=clipvalue,
            global_clipnorm=global_clipnorm,
            use_ema=use_ema,
            ema_momentum=ema_momentum,
            ema_overwrite_frequency=ema_overwrite_frequency,
            loss_scale_factor=loss_scale_factor,
            gradient_accumulation_steps=gradient_accumulation_steps,
            name=name,
            **kwargs,
        )
        self.initial_accumulator_value = initial_accumulator_value
        self.epsilon = epsilon

    def build(self, var_list):
        if self.built:
            return
        super().build(var_list)
        initializer = initializers.Constant(self.initial_accumulator_value)
        self._accumulators = self.add_optimizer_variables(
            var_list, "accumulator", initializer=initializer
        )

    def update_step(self, gradient, variable, learning_rate):
        """Update step given gradient and the associated model variable."""
        lr = ops.cast(learning_rate, variable.dtype)
        gradient = ops.cast(gradient, variable.dtype)

        accumulator = self._accumulators[self._get_variable_index(variable)]

        self.assign_add(accumulator, ops.square(gradient))
        self.assign_sub(
            variable,
            ops.divide(
                ops.multiply(lr, gradient),
                ops.sqrt(ops.add(accumulator, self.epsilon)),
            ),
        )

    def get_config(self):
        config = super().get_config()
        config.update(
            {
                "initial_accumulator_value": self.initial_accumulator_value,
                "epsilon": self.epsilon,
            }
        )
        return config


Adagrad.__doc__ = Adagrad.__doc__.replace(
    "{{base_optimizer_keyword_args}}", optimizer.base_optimizer_keyword_args
)
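

# A minimal serialization round-trip sketch (illustrative names only; it
# relies on `get_config` above and the base `Optimizer.from_config`):
#
#   opt = Adagrad(initial_accumulator_value=0.5, epsilon=1e-6)
#   restored = Adagrad.from_config(opt.get_config())
#   assert restored.epsilon == opt.epsilon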