
from keras.src import backend
from keras.src import ops


class DropoutRNNCell:
    """Object that holds dropout-related functionality for RNN cells.

    This class is not a standalone RNN cell. It is meant to be used with an
    RNN cell via multiple inheritance. Any cell that mixes in this class
    should have the following fields:

    - `dropout`: a float number in the range `[0, 1]`.
        Dropout rate for the input tensor.
    - `recurrent_dropout`: a float number in the range `[0, 1]`.
        Dropout rate for the recurrent connections.
    - `seed_generator`: an instance of `backend.random.SeedGenerator`.

    This object will create and cache dropout masks, and reuse them for
    all incoming steps, so that the same mask is used for every step.
    """

    def _create_dropout_mask(self, step_input, dropout_rate):
        # Cells that need more than one mask per step can set
        # `dropout_mask_count`; otherwise a single mask tensor is returned.
        count = getattr(self, "dropout_mask_count", None)
        ones = ops.ones_like(step_input)
        if count is None:
            return backend.random.dropout(
                ones, rate=dropout_rate, seed=self.seed_generator
            )
        else:
            return [
                backend.random.dropout(
                    ones, rate=dropout_rate, seed=self.seed_generator
                )
                for _ in range(count)
            ]

    def get_dropout_mask(self, step_input):
        if not hasattr(self, "_dropout_mask"):
            self._dropout_mask = None
        if self._dropout_mask is None and self.dropout > 0:
            self._dropout_mask = self._create_dropout_mask(
                step_input, self.dropout
            )
        return self._dropout_mask

    def get_recurrent_dropout_mask(self, step_input):
        if not hasattr(self, "_recurrent_dropout_mask"):
            self._recurrent_dropout_mask = None
        if self._recurrent_dropout_mask is None and self.recurrent_dropout > 0:
            self._recurrent_dropout_mask = self._create_dropout_mask(
                step_input, self.recurrent_dropout
            )
        return self._recurrent_dropout_mask

    def reset_dropout_mask(self):
        """Reset the cached dropout mask if any.

        The RNN layer invokes this in the `call()` method so that the cached
        mask is cleared after calling `cell.call()`. The mask should be cached
        across all timesteps within the same batch, but shouldn't be cached
        between batches.
        """
        self._dropout_mask = None

    def reset_recurrent_dropout_mask(self):
        self._recurrent_dropout_mask = None
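

# --- Usage sketch (illustrative, not part of this module) -------------------
# A minimal sketch of how a custom cell might mix in `DropoutRNNCell`. The
# cell name `SimpleDropoutCell` and its weight layout are hypothetical; only
# the mixin contract -- the `dropout`, `recurrent_dropout`, and
# `seed_generator` fields plus the `get_dropout_mask()` /
# `get_recurrent_dropout_mask()` helpers -- comes from the class above.
from keras.src.layers.layer import Layer


class SimpleDropoutCell(Layer, DropoutRNNCell):
    def __init__(
        self, units, dropout=0.0, recurrent_dropout=0.0, seed=None, **kwargs
    ):
        super().__init__(**kwargs)
        self.units = units
        self.state_size = units
        self.output_size = units
        # Fields required by the DropoutRNNCell mixin.
        self.dropout = dropout
        self.recurrent_dropout = recurrent_dropout
        self.seed_generator = backend.random.SeedGenerator(seed=seed)

    def build(self, input_shape):
        self.kernel = self.add_weight(
            shape=(input_shape[-1], self.units), initializer="glorot_uniform"
        )
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units), initializer="orthogonal"
        )

    def call(self, inputs, states, training=False):
        prev_h = states[0]
        if training:
            # The masks are created once and cached, so every timestep in the
            # batch sees the same dropout pattern; the RNN layer clears them
            # between batches via `reset_dropout_mask()` and
            # `reset_recurrent_dropout_mask()`.
            dp_mask = self.get_dropout_mask(inputs)
            rec_dp_mask = self.get_recurrent_dropout_mask(prev_h)
            if dp_mask is not None:
                inputs = inputs * dp_mask
            if rec_dp_mask is not None:
                prev_h = prev_h * rec_dp_mask
        h = ops.tanh(
            ops.matmul(inputs, self.kernel)
            + ops.matmul(prev_h, self.recurrent_kernel)
        )
        return h, [h]


# The cell can then be wrapped in a `keras.layers.RNN` layer, e.g.
# `keras.layers.RNN(SimpleDropoutCell(32, dropout=0.25))`, which handles the
# per-batch mask resets.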