
"""Wrapper layer to apply a layer to every temporal slice of an input."""

from keras.src import backend
from keras.src import ops
from keras.src.api_export import keras_export
from keras.src.layers.core.wrapper import Wrapper
from keras.src.layers.layer import Layer


@keras_export("keras.layers.TimeDistributed")
class TimeDistributed(Wrapper):
    """This wrapper allows applying a layer to every temporal slice of an input.

    Every input should be at least 3D, and the dimension of index one of the
    first input will be considered to be the temporal dimension.
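
    For instance, a wrapped `Dense` layer is applied to each timestep
    independently (an illustrative sketch with hypothetical shapes):

    >>> inputs = layers.Input(shape=(5, 16), batch_size=8)
    >>> outputs = layers.TimeDistributed(layers.Dense(4))(inputs)
    >>> outputs.shape
    (8, 5, 4)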

    Consider a batch of 32 video samples, where each sample is a 128x128 RGB
    image with `channels_last` data format, across 10 timesteps.
    The batch input shape is `(32, 10, 128, 128, 3)`.

    You can then use `TimeDistributed` to apply the same `Conv2D` layer to each
    of the 10 timesteps, independently:

    >>> inputs = layers.Input(shape=(10, 128, 128, 3), batch_size=32)
    >>> conv_2d_layer = layers.Conv2D(64, (3, 3))
    >>> outputs = layers.TimeDistributed(conv_2d_layer)(inputs)
    >>> outputs.shape
    (32, 10, 126, 126, 64)

    Because `TimeDistributed` applies the same instance of `Conv2D` to each of
    the timesteps, the same set of weights is used at each timestep.
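
    Continuing the example above, the wrapper introduces no weights of its
    own; it exposes exactly the wrapped convolution's variables (a quick
    illustrative check):

    >>> td_layer = layers.TimeDistributed(conv_2d_layer)
    >>> _ = td_layer(inputs)
    >>> len(td_layer.weights) == len(conv_2d_layer.weights)
    True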

    Args:
        layer: a `keras.layers.Layer` instance.

    Call arguments:
        inputs: Input tensor of shape `(batch, time, ...)`, or a nested
            structure of tensors, each of which has shape `(batch, time, ...)`.
        training: Python boolean indicating whether the layer should behave in
            training mode or in inference mode. This argument is passed to the
            wrapped layer (only if the layer supports this argument).
        mask: Binary tensor of shape `(samples, timesteps)` indicating whether
            a given timestep should be masked. This argument is passed to the
            wrapped layer (only if the layer supports this argument).
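
    For example, passing `training` to the wrapper forwards it to a wrapped
    `Dropout` layer, which accepts that argument (an illustrative sketch):

    >>> inputs = layers.Input(shape=(10, 8))
    >>> outputs = layers.TimeDistributed(layers.Dropout(0.5))(
    ...     inputs, training=True)
    >>> outputs.shape
    (None, 10, 8)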
    """

    def __init__(self, layer, **kwargs):
        if not isinstance(layer, Layer):
            raise ValueError(
                "Please initialize `TimeDistributed` layer with a "
                f"`keras.layers.Layer` instance. Received: {layer}"
            )
        super().__init__(layer, **kwargs)
        self.supports_masking = True

    def _get_child_input_shape(self, input_shape):
        if not isinstance(input_shape, (tuple, list)) or len(input_shape) < 3:
            raise ValueError(
                "`TimeDistributed` Layer should be passed an `input_shape` "
                f"with at least 3 dimensions, received: {input_shape}"
            )
        # Drop the time dimension: the child layer sees one temporal slice.
        return [input_shape[0], *input_shape[2:]]

    def compute_output_shape(self, input_shape):
        child_input_shape = self._get_child_input_shape(input_shape)
        child_output_shape = self.layer.compute_output_shape(child_input_shape)
        # Re-insert the time dimension right after the batch dimension.
        return [child_output_shape[0], input_shape[1], *child_output_shape[1:]]

    def build(self, input_shape):
        child_input_shape = self._get_child_input_shape(input_shape)
        super().build(child_input_shape)
                  s?|=|dd |fk7  r4t        d| d| d|       ||d d ||fk7  rt        d| d| d|       d	 }	 |	       |	       fd
}
j                  d   Gt        j                  t        j                  d         D cg c]
  } |
|       c}      } |	|      S t        j                  |
t        j                  |            } |	|      S c c}w )Nr   r   
tensorflow)r&   r   z<`TimeDistributed` Layer should be passed a `mask` of shape (z, z, ...), received: mask.shape=c                 v    ddgt        dt        | j                              }t        j                  | |      S )z4Swaps the timestep and batch dimensions of a tensor.r   r   r   )axes)ranger   shaper   	transpose)datar(   s     r   time_distributed_transposez8TimeDistributed.call.<locals>.time_distributed_transposei   s1    q55C

O45D==D11r   c                     i }j                   j                  r
|    |d<   j                   j                  r|d<    j                   j                  |    fi |S )Nmasktraining)r   _call_has_mask_arg_call_has_training_argcall)ir   inputsr/   r   r0   s     r   step_functionz+TimeDistributed.call.<locals>.step_functionr   s\    Fzz,,1A!%avzz00%-z""4::??6!9777r   )r   r*   r   keras.src.utils.module_utilsr&   executing_eagerlyr   stackr)   vectorized_maparange)r   r5   r0   r/   r   
mask_shape
batch_size	timestepstfr-   r6   r4   outputss   ````         r   r3   zTimeDistributed.callI   s   ii'!\Tsyy
 ^
N	
 ??,E ((*qO	|3 (\I; 7,,6<9 
 #
2A;
 )
 $R	{ 3((2|5 	2
 ,F3-d3D	8 <<?&ii+0a+ABaq!BG .g66 ((

98MN)'22 Cs   :E	)NN)