
from keras.src import activations
from keras.src import backend
from keras.src import constraints
from keras.src import initializers
from keras.src import ops
from keras.src import regularizers
from keras.src.api_export import keras_export
from keras.src.layers.input_spec import InputSpec
from keras.src.layers.layer import Layer
from keras.src.layers.rnn.dropout_rnn_cell import DropoutRNNCell
from keras.src.layers.rnn.rnn import RNN


@keras_export("keras.layers.SimpleRNNCell")
class SimpleRNNCell(Layer, DropoutRNNCell):
    """Cell class for SimpleRNN.

    This class processes one step within the whole time sequence input, whereas
    `keras.layers.SimpleRNN` processes the whole sequence.

    Args:
        units: Positive integer, dimensionality of the output space.
        activation: Activation function to use.
            Default: hyperbolic tangent (`tanh`).
            If you pass `None`, no activation is applied
            (i.e. "linear" activation: `a(x) = x`).
        use_bias: Boolean (default `True`), whether the layer
            should use a bias vector.
        kernel_initializer: Initializer for the `kernel` weights matrix,
            used for the linear transformation of the inputs. Default:
            `"glorot_uniform"`.
        recurrent_initializer: Initializer for the `recurrent_kernel`
            weights matrix, used for the linear transformation
            of the recurrent state. Default: `"orthogonal"`.
        bias_initializer: Initializer for the bias vector. Default: `"zeros"`.
        kernel_regularizer: Regularizer function applied to the `kernel` weights
            matrix. Default: `None`.
        recurrent_regularizer: Regularizer function applied to the
            `recurrent_kernel` weights matrix. Default: `None`.
        bias_regularizer: Regularizer function applied to the bias vector.
            Default: `None`.
        kernel_constraint: Constraint function applied to the `kernel` weights
            matrix. Default: `None`.
        recurrent_constraint: Constraint function applied to the
            `recurrent_kernel` weights matrix. Default: `None`.
        bias_constraint: Constraint function applied to the bias vector.
            Default: `None`.
        dropout: Float between 0 and 1. Fraction of the units to drop for the
            linear transformation of the inputs. Default: 0.
        recurrent_dropout: Float between 0 and 1. Fraction of the units to drop
            for the linear transformation of the recurrent state. Default: 0.
        seed: Random seed for dropout.
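
    Conceptually, each step applies the following update, shown here as a
    sketch rather than runnable code (`sequence` is the input at the current
    step, `prev_state` the previous state, and the bias term is only added
    when `use_bias=True`):

    ```python
    output = activation(sequence @ kernel + prev_state @ recurrent_kernel + bias)
    new_state = output
    ```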

    Call arguments:
        sequence: A 2D tensor, with shape `(batch, features)`.
        states: A 2D tensor with shape `(batch, units)`, which is the state
            from the previous time step.
        training: Python boolean indicating whether the layer should behave in
            training mode or in inference mode. Only relevant when `dropout` or
            `recurrent_dropout` is used.

    Example:

    ```python
    inputs = np.random.random([32, 10, 8]).astype(np.float32)
    rnn = keras.layers.RNN(keras.layers.SimpleRNNCell(4))
    output = rnn(inputs)  # The output has shape `(32, 4)`.
    rnn = keras.layers.RNN(
        keras.layers.SimpleRNNCell(4),
        return_sequences=True,
        return_state=True
    )
    # whole_sequence_output has shape `(32, 10, 4)`.
    # final_state has shape `(32, 4)`.
    whole_sequence_output, final_state = rnn(inputs)
    ```
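
    The cell can also be stepped manually, which is what `keras.layers.RNN`
    does internally. A minimal sketch, reusing the shapes from the example
    above:

    ```python
    cell = keras.layers.SimpleRNNCell(4)
    states = cell.get_initial_state(batch_size=32)
    x_t = np.random.random([32, 8]).astype(np.float32)
    output, states = cell(x_t, states)  # `output` has shape `(32, 4)`.
    ```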
    """

    def __init__(
        self,
        units,
        activation="tanh",
        use_bias=True,
        kernel_initializer="glorot_uniform",
        recurrent_initializer="orthogonal",
        bias_initializer="zeros",
        kernel_regularizer=None,
        recurrent_regularizer=None,
        bias_regularizer=None,
        kernel_constraint=None,
        recurrent_constraint=None,
        bias_constraint=None,
        dropout=0.0,
        recurrent_dropout=0.0,
        seed=None,
        **kwargs,
    ):
        if units <= 0:
            raise ValueError(
                "Received an invalid value for argument `units`, "
                f"expected a positive integer, got {units}."
            )
        super().__init__(**kwargs)
        self.seed = seed
        self.seed_generator = backend.random.SeedGenerator(seed=seed)

        self.units = units
        self.activation = activations.get(activation)
        self.use_bias = use_bias

        self.kernel_initializer = initializers.get(kernel_initializer)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        self.bias_initializer = initializers.get(bias_initializer)

        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)

        self.kernel_constraint = constraints.get(kernel_constraint)
        self.recurrent_constraint = constraints.get(recurrent_constraint)
        self.bias_constraint = constraints.get(bias_constraint)

        # Dropout rates are clipped to the [0, 1] range.
        self.dropout = min(1.0, max(0.0, dropout))
        self.recurrent_dropout = min(1.0, max(0.0, recurrent_dropout))
        self.state_size = self.units
        self.output_size = self.units

    def build(self, input_shape):
        self.kernel = self.add_weight(
            shape=(input_shape[-1], self.units),
            name="kernel",
            initializer=self.kernel_initializer,
            regularizer=self.kernel_regularizer,
            constraint=self.kernel_constraint,
        )
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units),
            name="recurrent_kernel",
            initializer=self.recurrent_initializer,
            regularizer=self.recurrent_regularizer,
            constraint=self.recurrent_constraint,
        )
        if self.use_bias:
            self.bias = self.add_weight(
                shape=(self.units,),
                name="bias",
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
            )
        else:
            self.bias = None

    def call(self, sequence, states, training=False):
        prev_output = states[0] if isinstance(states, (list, tuple)) else states
        dp_mask = self.get_dropout_mask(sequence)
        rec_dp_mask = self.get_recurrent_dropout_mask(prev_output)

        # Input dropout is only applied in training mode.
        if training and dp_mask is not None:
            sequence = sequence * dp_mask
        h = ops.matmul(sequence, self.kernel)
        if self.bias is not None:
            h += self.bias

        # Recurrent dropout is applied to the previous state, also only
        # in training mode.
        if training and rec_dp_mask is not None:
            prev_output = prev_output * rec_dp_mask
        output = h + ops.matmul(prev_output, self.recurrent_kernel)
        if self.activation is not None:
            output = self.activation(output)

        new_state = [output] if isinstance(states, (list, tuple)) else output
        return output, new_state

    def get_initial_state(self, batch_size=None):
        return [
            ops.zeros((batch_size, self.state_size), dtype=self.compute_dtype)
        ]

    def get_config(self):
        config = {
            "units": self.units,
            "activation": activations.serialize(self.activation),
            "use_bias": self.use_bias,
            "kernel_initializer": initializers.serialize(self.kernel_initializer),
            "recurrent_initializer": initializers.serialize(
                self.recurrent_initializer
            ),
            "bias_initializer": initializers.serialize(self.bias_initializer),
            "kernel_regularizer": regularizers.serialize(self.kernel_regularizer),
            "recurrent_regularizer": regularizers.serialize(
                self.recurrent_regularizer
            ),
            "bias_regularizer": regularizers.serialize(self.bias_regularizer),
            "kernel_constraint": constraints.serialize(self.kernel_constraint),
            "recurrent_constraint": constraints.serialize(
                self.recurrent_constraint
            ),
            "bias_constraint": constraints.serialize(self.bias_constraint),
            "dropout": self.dropout,
            "recurrent_dropout": self.recurrent_dropout,
            "seed": self.seed,
        }
        base_config = super().get_config()
        return {**base_config, **config}


@keras_export("keras.layers.SimpleRNN")
class SimpleRNN(RNN):
    """Fully-connected RNN where the output is to be fed back as the new input.

    Args:
        units: Positive integer, dimensionality of the output space.
        activation: Activation function to use.
            Default: hyperbolic tangent (`tanh`).
            If you pass `None`, no activation is applied
            (i.e. "linear" activation: `a(x) = x`).
        use_bias: Boolean (default `True`), whether the layer uses
            a bias vector.
        kernel_initializer: Initializer for the `kernel` weights matrix,
            used for the linear transformation of the inputs. Default:
            `"glorot_uniform"`.
        recurrent_initializer: Initializer for the `recurrent_kernel`
            weights matrix, used for the linear transformation of the recurrent
            state.  Default: `"orthogonal"`.
        bias_initializer: Initializer for the bias vector. Default: `"zeros"`.
        kernel_regularizer: Regularizer function applied to the `kernel` weights
            matrix. Default: `None`.
        recurrent_regularizer: Regularizer function applied to the
            `recurrent_kernel` weights matrix. Default: `None`.
        bias_regularizer: Regularizer function applied to the bias vector.
            Default: `None`.
        activity_regularizer: Regularizer function applied to the output of the
            layer (its "activation"). Default: `None`.
        kernel_constraint: Constraint function applied to the `kernel` weights
            matrix. Default: `None`.
        recurrent_constraint: Constraint function applied to the
            `recurrent_kernel` weights matrix.  Default: `None`.
        bias_constraint: Constraint function applied to the bias vector.
            Default: `None`.
        dropout: Float between 0 and 1.
            Fraction of the units to drop for the linear transformation
            of the inputs. Default: 0.
        recurrent_dropout: Float between 0 and 1.
            Fraction of the units to drop for the linear transformation of the
            recurrent state. Default: 0.
        return_sequences: Boolean. Whether to return the last output
            in the output sequence, or the full sequence. Default: `False`.
        return_state: Boolean. Whether to return the last state
            in addition to the output. Default: `False`.
        go_backwards: Boolean (default: `False`).
            If `True`, process the input sequence backwards and return the
            reversed sequence.
        stateful: Boolean (default: `False`). If `True`, the last state
            for each sample at index i in a batch will be used as the
            initial state for the sample of index i in the following batch.
        unroll: Boolean (default: `False`).
            If `True`, the network will be unrolled,
            else a symbolic loop will be used.
            Unrolling can speed-up an RNN,
            although it tends to be more memory-intensive.
            Unrolling is only suitable for short sequences.
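
    As a quick sketch of how the dropout-related arguments interact with the
    `training` flag described below (the rates here are arbitrary):

    ```python
    inputs = np.random.random((32, 10, 8))
    layer = keras.layers.SimpleRNN(4, dropout=0.5, recurrent_dropout=0.5)
    train_out = layer(inputs, training=True)   # dropout masks are applied
    infer_out = layer(inputs, training=False)  # deterministic
    ```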

    Call arguments:
        sequence: A 3D tensor, with shape `[batch, timesteps, feature]`.
        mask: Binary tensor of shape `[batch, timesteps]` indicating whether
            a given timestep should be masked. An individual `True` entry
            indicates that the corresponding timestep should be utilized,
            while a `False` entry indicates that the corresponding timestep
            should be ignored.
        training: Python boolean indicating whether the layer should behave in
            training mode or in inference mode.
            This argument is passed to the cell when calling it.
            This is only relevant if `dropout` or `recurrent_dropout` is used.
        initial_state: List of initial state tensors to be passed to the first
            call of the cell.
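
    Masks are usually produced upstream rather than built by hand, for example
    by an `Embedding` layer with `mask_zero=True`. A sketch (the token ids are
    made up and `0` is the padding id):

    ```python
    tokens = np.array([[3, 7, 2, 0, 0], [5, 1, 0, 0, 0]])
    x = keras.layers.Embedding(input_dim=10, output_dim=8, mask_zero=True)(tokens)
    output = keras.layers.SimpleRNN(4)(x)  # padded steps are masked out
    ```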

    Example:

    ```python
    inputs = np.random.random((32, 10, 8))
    simple_rnn = keras.layers.SimpleRNN(4)
    output = simple_rnn(inputs)  # The output has shape `(32, 4)`.
    simple_rnn = keras.layers.SimpleRNN(
        4, return_sequences=True, return_state=True
    )
    # whole_sequence_output has shape `(32, 10, 4)`.
    # final_state has shape `(32, 4)`.
    whole_sequence_output, final_state = simple_rnn(inputs)
    ```
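
    The initial state can also be passed explicitly. A sketch reusing the
    shapes above (a single state tensor of shape `(batch, units)`):

    ```python
    initial_state = [np.zeros((32, 4), dtype="float32")]
    simple_rnn = keras.layers.SimpleRNN(4)
    output = simple_rnn(inputs, initial_state=initial_state)
    ```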
    """

    def __init__(
        self,
        units,
        activation="tanh",
        use_bias=True,
        kernel_initializer="glorot_uniform",
        recurrent_initializer="orthogonal",
        bias_initializer="zeros",
        kernel_regularizer=None,
        recurrent_regularizer=None,
        bias_regularizer=None,
        activity_regularizer=None,
        kernel_constraint=None,
        recurrent_constraint=None,
        bias_constraint=None,
        dropout=0.0,
        recurrent_dropout=0.0,
        return_sequences=False,
        return_state=False,
        go_backwards=False,
        stateful=False,
        unroll=False,
        seed=None,
        **kwargs,
    ):
        cell = SimpleRNNCell(
            units,
            activation=activation,
            use_bias=use_bias,
            kernel_initializer=kernel_initializer,
            recurrent_initializer=recurrent_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer,
            recurrent_regularizer=recurrent_regularizer,
            bias_regularizer=bias_regularizer,
            kernel_constraint=kernel_constraint,
            recurrent_constraint=recurrent_constraint,
            bias_constraint=bias_constraint,
            dropout=dropout,
            recurrent_dropout=recurrent_dropout,
            seed=seed,
            dtype=kwargs.get("dtype", None),
            trainable=kwargs.get("trainable", True),
            name="simple_rnn_cell",
        )
        super().__init__(
            cell,
            return_sequences=return_sequences,
            return_state=return_state,
            go_backwards=go_backwards,
            stateful=stateful,
            unroll=unroll,
            activity_regularizer=activity_regularizer,
            **kwargs,
        )
        self.input_spec = [InputSpec(ndim=3)]

    def call(self, sequences, initial_state=None, mask=None, training=False):
        return super().call(
            sequences, mask=mask, training=training, initial_state=initial_state
        )

    @property
    def units(self):
        return self.cell.units

    @property
    def activation(self):
        return self.cell.activation

    @property
    def use_bias(self):
        return self.cell.use_bias

    @property
    def kernel_initializer(self):
        return self.cell.kernel_initializer

    @property
    def recurrent_initializer(self):
        return self.cell.recurrent_initializer

    @property
    def bias_initializer(self):
        return self.cell.bias_initializer

    @property
    def kernel_regularizer(self):
        return self.cell.kernel_regularizer

    @property
    def recurrent_regularizer(self):
        return self.cell.recurrent_regularizer

    @property
    def bias_regularizer(self):
        return self.cell.bias_regularizer

    @property
    def kernel_constraint(self):
        return self.cell.kernel_constraint

    @property
    def recurrent_constraint(self):
        return self.cell.recurrent_constraint

    @property
    def bias_constraint(self):
        return self.cell.bias_constraint

    @property
    def dropout(self):
        return self.cell.dropout

    @property
    def recurrent_dropout(self):
        return self.cell.recurrent_dropout

    def get_config(self):
        config = {
            "units": self.units,
            "activation": activations.serialize(self.activation),
            "use_bias": self.use_bias,
            "kernel_initializer": initializers.serialize(self.kernel_initializer),
            "recurrent_initializer": initializers.serialize(
                self.recurrent_initializer
            ),
            "bias_initializer": initializers.serialize(self.bias_initializer),
            "kernel_regularizer": regularizers.serialize(self.kernel_regularizer),
            "recurrent_regularizer": regularizers.serialize(
                self.recurrent_regularizer
            ),
            "bias_regularizer": regularizers.serialize(self.bias_regularizer),
            "activity_regularizer": regularizers.serialize(
                self.activity_regularizer
            ),
            "kernel_constraint": constraints.serialize(self.kernel_constraint),
            "recurrent_constraint": constraints.serialize(
                self.recurrent_constraint
            ),
            "bias_constraint": constraints.serialize(self.bias_constraint),
            "dropout": self.dropout,
            "recurrent_dropout": self.recurrent_dropout,
        }
        base_config = super().get_config()
        # The wrapped cell is rebuilt from these arguments, so it is not
        # serialized separately.
        del base_config["cell"]
        return {**base_config, **config}

    @classmethod
    def from_config(cls, config):
        return cls(**config)