
"""A class for JAX-specific optimizer logic.

Its purpose is to route around statelessness
requirements in the cond ops used for EMA and
gradient-accumulation handling. We do this by
skipping conditionals entirely.
    N)numpy)base_optimizerc                       e Zd Zd Zy)JaxOptimizerc                      j                   r j                  dz    j                   z  dk(  } j                   D cg c]  }|j                   c} j                  D cg c]  }|j                   c}D cg c]   } j                   j                  |         " c}t        j                  j                  |fdfd      }t        j                  j                  |fdfd       j                         j                          j                   j                         t        j                  j                  |fdfd      }t        j                  j                  | fd	fd
      }t        |      D ]  \  }}|j                  |        t        | j                        D ]  \  }}|j                  |        t        |      D ]  \  }	}
|
j                  |	        n? j                         j                          j                   j                          j                  r j!                   j"                          j$                  ŉ j&                  dz    j$                  z  dk(  }|j)                  d      }t+        j,                  |      j)                  d      } j"                  D cg c]  }|j                   c}t         j"                   j.                        D ])  \  }}|j                  ||z  |j                  |z  z          + y y y c c}w c c}w c c}w c c}w )N   r   c                      D  cg c]-  } t        j                  | j                  | j                        / c} S c c} w )N)dtype)jnpzerosshaper
   )g	acc_gradss    O/home/dcms/DCMS/lib/python3.12/site-packages/keras/src/backend/jax/optimizer.py<lambda>z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>&   s(    INq177!'':N Ns   2;c                  T    t              D  cg c]
  \  } }| |z    c}} S c c}} w Nzip)r   acc_gr   gradss     r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>'   s$    3ui3HIxq%UI Is   $c                  Z    t              D  cg c]  \  } }| |z   z   c}} S c c}} w r   r   )r   r   r   r   stepss     r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>,   s1    8;E98M,4AuQY%'  s   'c                      t               S r   )list)r   s   r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>/   s    U     c                  B    D  cg c]  } | j                    c} S c c} w r   )value)vtrainable_variabless    r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>;   s    *=>Q> >s   c                       S r    )current_trainable_vars_values   r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda><       4 r   c                  V    j                   D  cg c]  } | j                   c} S c c} w r   )	variablesr   )r   selfs    r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>@   s    $..9Q9 9s   &c                       S r   r"   )current_optimizer_vars_values   r   r   z7JaxOptimizer._backend_apply_gradients.<locals>.<lambda>A   r$   r   int32)gradient_accumulation_steps_iterationsr   r&   _accumulated_gradients_get_variable_indexjaxlaxcond_clip_gradients_apply_weight_decay_backend_update_steplearning_rater   assignuse_ema&_update_model_variables_moving_average_trainable_variablesema_overwrite_frequency
iterationsastyper   logical_not_model_variables_moving_average)r'   r   r    is_update_stepr   
new_g_accsnew_trainable_varsnew_opt_varsr   n_g_accg_accshould_overwrite_model_varsshould_overwrite_model_vars_int#should_not_overwrite_model_vars_intvaraverage_varr   r)   r#   r   s   ```             @@@@r   _backend_apply_gradientsz%JaxOptimizer._backend_apply_gradients   s7   ++  1$001456N 44E "5,,( >B^^+LAGG+L( - ++D,D,DQ,GHI
 NIJ GGLL $E ((/E$$%89%%*D,>,> "%>4"
 77<<94L   24GH  q   dnn=  q  #&j)"< &W%&
 ((/E$$%89%%*D,>,> <<77)) ++7OOa'00/145/6+ 066w? 0 7:oo/7&/ 4 &*%>%>0 !AGG0, ),--88) $C JJ#&EE))&IIJ	 8	 , ,M
J0s   L;(M %MM
N)__name__
__module____qualname__rJ   r"   r   r   r   r      s    ^r   r   )__doc__r/   r   r   keras.src.optimizersr   BaseOptimizerr   r"   r   r   <module>rQ      s)      /_>// _r   