
"""Layers that act as activation functions."""

from tensorflow.python.framework import dtypes
from tensorflow.python.keras import backend
from tensorflow.python.keras import constraints
from tensorflow.python.keras import initializers
from tensorflow.python.keras import regularizers
from tensorflow.python.keras.engine.base_layer import Layer
from tensorflow.python.keras.engine.input_spec import InputSpec
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import math_ops


def get_globals():
  return globals()


class LeakyReLU(Layer):
  """Leaky version of a Rectified Linear Unit.

  It allows a small gradient when the unit is not active:

  ```
    f(x) = alpha * x if x < 0
    f(x) = x if x >= 0
  ```

  Usage:

  >>> layer = tf.keras.layers.LeakyReLU()
  >>> output = layer([-3.0, -1.0, 0.0, 2.0])
  >>> list(output.numpy())
  [-0.9, -0.3, 0.0, 2.0]
  >>> layer = tf.keras.layers.LeakyReLU(alpha=0.1)
  >>> output = layer([-3.0, -1.0, 0.0, 2.0])
  >>> list(output.numpy())
  [-0.3, -0.1, 0.0, 2.0]

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the batch axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    alpha: Float >= 0. Negative slope coefficient. Defaults to 0.3.
  """

  def __init__(self, alpha=0.3, **kwargs):
    super(LeakyReLU, self).__init__(**kwargs)
    if alpha is None:
      raise ValueError('The alpha value of a Leaky ReLU layer cannot be None, '
                       'needs a float. Got %s' % alpha)
    self.supports_masking = True
    self.alpha = backend.cast_to_floatx(alpha)

  def call(self, inputs):
    return backend.relu(inputs, alpha=self.alpha)

  def get_config(self):
    config = {'alpha': float(self.alpha)}
    base_config = super(LeakyReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


class PReLU(Layer):
  """Parametric Rectified Linear Unit.

  It follows:

  ```
    f(x) = alpha * x for x < 0
    f(x) = x for x >= 0
  ```

  where `alpha` is a learned array with the same shape as x.

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    alpha_initializer: Initializer function for the weights.
    alpha_regularizer: Regularizer for the weights.
    alpha_constraint: Constraint for the weights.
    shared_axes: The axes along which to share learnable
      parameters for the activation function.
      For example, if the incoming feature maps
      are from a 2D convolution
      with output shape `(batch, height, width, channels)`,
      and you wish to share parameters across space
      so that each filter only has one set of parameters,
      set `shared_axes=[1, 2]`.
  """

  def __init__(self,
               alpha_initializer='zeros',
               alpha_regularizer=None,
               alpha_constraint=None,
               shared_axes=None,
               **kwargs):
    super(PReLU, self).__init__(**kwargs)
    self.supports_masking = True
    self.alpha_initializer = initializers.get(alpha_initializer)
    self.alpha_regularizer = regularizers.get(alpha_regularizer)
    self.alpha_constraint = constraints.get(alpha_constraint)
    if shared_axes is None:
      self.shared_axes = None
    elif not isinstance(shared_axes, (list, tuple)):
      self.shared_axes = [shared_axes]
    else:
      self.shared_axes = list(shared_axes)

  @tf_utils.shape_type_conversion
  def build(self, input_shape):
    param_shape = list(input_shape[1:])
    if self.shared_axes is not None:
      # Collapse the shared axes so a single alpha is learned along them.
      for i in self.shared_axes:
        param_shape[i - 1] = 1
    self.alpha = self.add_weight(
        shape=param_shape,
        name='alpha',
        initializer=self.alpha_initializer,
        regularizer=self.alpha_regularizer,
        constraint=self.alpha_constraint)
    # Pin the input rank and the sizes of all non-shared axes.
    axes = {}
    if self.shared_axes:
      for i in range(1, len(input_shape)):
        if i not in self.shared_axes:
          axes[i] = input_shape[i]
    self.input_spec = InputSpec(ndim=len(input_shape), axes=axes)
    self.built = True

  def call(self, inputs):
    pos = backend.relu(inputs)
    neg = -self.alpha * backend.relu(-inputs)
    return pos + neg

  def get_config(self):
    config = {
        'alpha_initializer': initializers.serialize(self.alpha_initializer),
        'alpha_regularizer': regularizers.serialize(self.alpha_regularizer),
        'alpha_constraint': constraints.serialize(self.alpha_constraint),
        'shared_axes': self.shared_axes
    }
    base_config = super(PReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


class ELU(Layer):
  """Exponential Linear Unit.

  It follows:

  ```
    f(x) = alpha * (exp(x) - 1.) for x < 0
    f(x) = x for x >= 0
  ```

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    alpha: Scale for the negative factor.
  """

  def __init__(self, alpha=1.0, **kwargs):
    super(ELU, self).__init__(**kwargs)
    if alpha is None:
      raise ValueError('Alpha of an ELU layer cannot be None, requires a '
                       'float. Got %s' % alpha)
    self.supports_masking = True
    self.alpha = backend.cast_to_floatx(alpha)

  def call(self, inputs):
    return backend.elu(inputs, self.alpha)

  def get_config(self):
    config = {'alpha': float(self.alpha)}
    base_config = super(ELU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


class ThresholdedReLU(Layer):
  """Thresholded Rectified Linear Unit.

  It follows:

  ```
    f(x) = x for x > theta
    f(x) = 0 otherwise
  ```

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    theta: Float >= 0. Threshold location of activation.
  """

  def __init__(self, theta=1.0, **kwargs):
    super(ThresholdedReLU, self).__init__(**kwargs)
    if theta is None:
      raise ValueError('Theta of a Thresholded ReLU layer cannot be None, '
                       'requires a float. Got %s' % theta)
    if theta < 0:
      raise ValueError('The theta value of a Thresholded ReLU layer should '
                       'be >=0, got %s' % theta)
    self.supports_masking = True
    self.theta = backend.cast_to_floatx(theta)

  def call(self, inputs):
    theta = math_ops.cast(self.theta, inputs.dtype)
    return inputs * math_ops.cast(math_ops.greater(inputs, theta), inputs.dtype)

  def get_config(self):
    config = {'theta': float(self.theta)}
    base_config = super(ThresholdedReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


def _large_compatible_negative(tensor_type):
  """Large negative number as Tensor.

  This function is necessary because the standard value for epsilon
  in this module (-1e9) cannot be represented using `tf.float16`.

  Args:
    tensor_type: a dtype to determine the type.

  Returns:
    a large negative number.
  """
  if tensor_type == dtypes.float16:
    return dtypes.float16.min
  return -1e9


class Softmax(Layer):
  """Softmax activation function.

  Example without mask:

  >>> inp = np.asarray([1., 2., 1.])
  >>> layer = tf.keras.layers.Softmax()
  >>> layer(inp).numpy()
  array([0.21194157, 0.5761169 , 0.21194157], dtype=float32)
  >>> mask = np.asarray([True, False, True], dtype=bool)
  >>> layer(inp, mask).numpy()
  array([0.5, 0. , 0.5], dtype=float32)

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the samples axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    axis: Integer, or list of Integers, axis along which the softmax
      normalization is applied.

  Call arguments:
    inputs: The inputs, or logits to the softmax layer.
    mask: A boolean mask of the same shape as `inputs`. Defaults to `None`. The
      mask specifies 1 to keep and 0 to mask.

  Returns:
    softmaxed output with the same shape as `inputs`.
  """

  def __init__(self, axis=-1, **kwargs):
    super(Softmax, self).__init__(**kwargs)
    self.supports_masking = True
    self.axis = axis

  def call(self, inputs, mask=None):
    if mask is not None:
      # The mask is 1.0 for positions to keep and 0.0 for masked positions.
      # Adding a large negative bias to the masked logits drives their
      # softmax probabilities to (effectively) zero.
      adder = (1.0 - math_ops.cast(mask, inputs.dtype)) * (
          _large_compatible_negative(inputs.dtype))
      inputs += adder
    if isinstance(self.axis, (tuple, list)):
      if len(self.axis) > 1:
        return math_ops.exp(inputs - math_ops.reduce_logsumexp(
            inputs, axis=self.axis, keepdims=True))
      else:
        return backend.softmax(inputs, axis=self.axis[0])
    return backend.softmax(inputs, axis=self.axis)

  def get_config(self):
    config = {'axis': self.axis}
    base_config = super(Softmax, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape


class ReLU(Layer):
  """Rectified Linear Unit activation function.

  With default values, it returns element-wise `max(x, 0)`.

  Otherwise, it follows:

  ```
    f(x) = max_value if x >= max_value
    f(x) = x if threshold <= x < max_value
    f(x) = negative_slope * (x - threshold) otherwise
  ```

  Usage:

  >>> layer = tf.keras.layers.ReLU()
  >>> output = layer([-3.0, -1.0, 0.0, 2.0])
  >>> list(output.numpy())
  [0.0, 0.0, 0.0, 2.0]
  >>> layer = tf.keras.layers.ReLU(max_value=1.0)
  >>> output = layer([-3.0, -1.0, 0.0, 2.0])
  >>> list(output.numpy())
  [0.0, 0.0, 0.0, 1.0]
  >>> layer = tf.keras.layers.ReLU(negative_slope=1.0)
  >>> output = layer([-3.0, -1.0, 0.0, 2.0])
  >>> list(output.numpy())
  [-3.0, -1.0, 0.0, 2.0]
  >>> layer = tf.keras.layers.ReLU(threshold=1.5)
  >>> output = layer([-3.0, -1.0, 1.0, 2.0])
  >>> list(output.numpy())
  [0.0, 0.0, 0.0, 2.0]

  Input shape:
    Arbitrary. Use the keyword argument `input_shape`
    (tuple of integers, does not include the batch axis)
    when using this layer as the first layer in a model.

  Output shape:
    Same shape as the input.

  Args:
    max_value: Float >= 0. Maximum activation value. Defaults to None, which
      means unlimited.
    negative_slope: Float >= 0. Negative slope coefficient. Defaults to 0.
    threshold: Float >= 0. Threshold value for thresholded activation. Defaults
      to 0.
  """

  def __init__(self, max_value=None, negative_slope=0, threshold=0, **kwargs):
    super(ReLU, self).__init__(**kwargs)
    if max_value is not None and max_value < 0.:
      raise ValueError('max_value of a ReLU layer cannot be a negative '
                       'value. Got: %s' % max_value)
    if negative_slope is None or negative_slope < 0.:
      raise ValueError('negative_slope of a ReLU layer cannot be a negative '
                       'value. Got: %s' % negative_slope)
    if threshold is None or threshold < 0.:
      raise ValueError('threshold of a ReLU layer cannot be a negative '
                       'value. Got: %s' % threshold)

    self.supports_masking = True
    if max_value is not None:
      max_value = backend.cast_to_floatx(max_value)
    self.max_value = max_value
    self.negative_slope = backend.cast_to_floatx(negative_slope)
    self.threshold = backend.cast_to_floatx(threshold)

  def call(self, inputs):
    # backend.relu expresses the negative slope through its `alpha` argument.
    return backend.relu(inputs,
                        alpha=self.negative_slope,
                        max_value=self.max_value,
                        threshold=self.threshold)

  def get_config(self):
    config = {
        'max_value': self.max_value,
        'negative_slope': self.negative_slope,
        'threshold': self.threshold
    }
    base_config = super(ReLU, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))

  @tf_utils.shape_type_conversion
  def compute_output_shape(self, input_shape):
    return input_shape
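

# Illustrative usage sketch: a minimal, self-contained demo of two behaviours
# documented above, namely how `shared_axes` collapses PReLU's learned `alpha`
# and how the boolean mask passed to `Softmax` suppresses masked positions.
# This assumes a TensorFlow 2.x environment with eager execution enabled (the
# default) and is guarded so that importing the module stays side-effect free.
if __name__ == '__main__':
  import numpy as np

  # PReLU on a (batch, height, width, channels) input: sharing axes 1 and 2
  # leaves one learned alpha per channel instead of one per spatial position.
  prelu = PReLU(shared_axes=[1, 2])
  prelu.build((None, 8, 8, 3))
  print(prelu.alpha.shape)  # (1, 1, 3)

  # Softmax with a boolean mask: masked logits receive a large negative bias,
  # so they get (near-)zero probability and the remaining entries renormalize.
  logits = np.asarray([1., 2., 1.], dtype='float32')
  mask = np.asarray([True, False, True])
  print(Softmax()(logits, mask).numpy())  # approximately [0.5, 0. , 0.5]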