from keras.src import ops
from keras.src.api_export import keras_export
from keras.src.layers.layer import Layer


@keras_export("keras.layers.RMSNormalization")
class RMSNormalization(Layer):
    """Root Mean Square (RMS) Normalization layer.

    This layer normalizes the input tensor based on its RMS value.

    The Keras layer performs the operation as described in
    [Root Mean Square Layer Normalization](https://arxiv.org/pdf/1910.07467)
    by Biao Zhang et al.


    If `scale` is enabled, the layer will scale the normalized outputs via
    a learnable scaling factor.

    So, with scaling enabled, the normalization equation
    is as follows:

    Let the intermediate activations for a mini-batch be the `inputs`.

    ```python
    rms_normalization(x) = x * rsqrt(mean(square(x))) * scale
    ```

    For example:

    >>> layer = keras.layers.RMSNormalization()
    >>> layer.build([5, 20, 30, 10])
    >>> print(layer.scale.shape)
    (10,)
    >>> layer(np.random.rand(1, 10)).numpy()
    array([[0.35098287, 1.0495652 , 1.4645109 , 1.2944688 , 0.31124955,
            1.2768592 , 1.184331  , 0.17474432, 0.49955517, 1.2428929 ]],
        dtype=float32)

    Args:
        axis: int. The axis on which to perform the normalization.
        epsilon: float. A small number to add to avoid division by zero.
    """

    def __init__(self, axis=-1, epsilon=1e-6, **kwargs):
        super().__init__(**kwargs)
        self.axis = axis
        self.epsilon = epsilon

    def build(self, input_shape):
        if isinstance(self.axis, list):
            shape = tuple([input_shape[dim] for dim in self.axis])
        else:
            shape = (input_shape[self.axis],)
            self.axis = [self.axis]

        self.scale = self.add_weight(
            name="scale", shape=shape, initializer="ones"
        )

        self.built = True

    def call(self, x):
        """Applies RMS normalization to the input tensor.

        Args:
            x: Input tensor of shape (batch_size, input_dim).

        Returns:
            The RMS-normalized tensor of the same shape (batch_size, input_dim),
            scaled by the learned `scale` parameter.
        """
        return ops.rms_normalization(
            x, scale=self.scale, axis=self.axis, epsilon=self.epsilon
        )

    def compute_output_shape(self, input_shape):
        if isinstance(self.axis, int):
            axes = [self.axis]
        else:
            axes = self.axis

        for axis in axes:
            if axis >= len(input_shape) or axis < -len(input_shape):
                raise ValueError(
                    f"Axis {axis} is out of bounds for "
                    f"input shape {input_shape}. "
                    f"Received: axis={self.axis}"
                )
        return input_shape

    def get_config(self):
        config = {
            "axis": self.axis,
            "epsilon": self.epsilon,
        }
        base_config = super().get_config()
        return {**base_config, **config}