
"""Keras base class for depthwise convolution layers."""

from keras.src import activations
from keras.src import constraints
from keras.src import initializers
from keras.src import ops
from keras.src import regularizers
from keras.src.backend import standardize_data_format
from keras.src.layers.input_spec import InputSpec
from keras.src.layers.layer import Layer
from keras.src.ops.operation_utils import compute_conv_output_shape
from keras.src.utils.argument_validation import standardize_padding
from keras.src.utils.argument_validation import standardize_tuple


class BaseDepthwiseConv(Layer):
    """Abstract N-D depthwise convolution layer.

    Depthwise convolution is a type of convolution in which each input channel
    is convolved with a different kernel (called a depthwise kernel). You can
    understand depthwise convolution as the first step in a depthwise separable
    convolution.

    It is implemented via the following steps:

    - Split the input into individual channels.
    - Convolve each channel with an individual depthwise kernel with
      `depth_multiplier` output channels.
    - Concatenate the convolved outputs along the channels axis.

    Unlike a regular convolution, depthwise convolution does not mix information
    across different input channels.

    The `depth_multiplier` argument determines how many filters are applied to
    each input channel. As such, it controls the number of output channels
    generated per input channel in the depthwise step.


    Args:
        rank: int, the rank of the convolution, e.g. 2 for 2D convolution.
        depth_multiplier: The number of depthwise convolution output channels
            for each input channel. The total number of depthwise convolution
            output channels will be equal to `input_channel * depth_multiplier`.
        kernel_size: int or tuple/list of `rank` integers, specifying the size
            of the depthwise convolution window.
        strides: int or tuple/list of `rank` integers, specifying the stride
            length of the depthwise convolution. If only one int is specified,
            the same stride size will be used for all dimensions.
            `strides > 1` is incompatible with `dilation_rate > 1`.
        padding: string, either `"valid"` or `"same"` (case-insensitive).
            `"valid"` means no padding. `"same"` results in padding evenly to
            the left/right or up/down of the input. When `padding="same"` and
            `strides=1`, the output has the same size as the input.
        data_format: string, either `"channels_last"` or `"channels_first"`.
            The ordering of the dimensions in the inputs. `"channels_last"`
            corresponds to inputs with shape `(batch, steps, features)`
            while `"channels_first"` corresponds to inputs with shape
            `(batch, features, steps)`. It defaults to the `image_data_format`
            value found in your Keras config file at `~/.keras/keras.json`.
            If you never set it, then it will be `"channels_last"`.
        dilation_rate: int or tuple/list of `rank` integers, specifying the
            dilation rate to use for dilated convolution. If only one int is
            specified, the same dilation rate will be used for all dimensions.
        activation: Activation function. If `None`, no activation is applied.
        use_bias: bool, if `True`, bias will be added to the output.
        depthwise_initializer: Initializer for the depthwise convolution
            kernel. If `None`, the default initializer (`"glorot_uniform"`)
            will be used.
        bias_initializer: Initializer for the bias vector. If `None`, the
            default initializer (`"zeros"`) will be used.
        depthwise_regularizer: Optional regularizer for the convolution kernel.
        bias_regularizer: Optional regularizer for the bias vector.
        activity_regularizer: Optional regularizer function for the output.
        depthwise_constraint: Optional projection function to be applied to the
            kernel after being updated by an `Optimizer` (e.g. used to implement
            norm constraints or value constraints for layer weights). The
            function must take as input the unprojected variable and must return
            the projected variable (which must have the same shape). Constraints
            are not safe to use when doing asynchronous distributed training.
        bias_constraint: Optional projection function to be applied to the
            bias after being updated by an `Optimizer`.
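
    Example (illustrative only; it uses the concrete `DepthwiseConv2D`
    subclass, which builds on this base class with `rank=2`, and assumes
    `keras` and NumPy (`np`) are imported):

    >>> x = np.random.rand(4, 10, 10, 12)
    >>> y = keras.layers.DepthwiseConv2D(kernel_size=3, depth_multiplier=2)(x)
    >>> print(y.shape)
    (4, 8, 8, 24)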
    """

    def __init__(
        self,
        rank,
        depth_multiplier,
        kernel_size,
        strides=1,
        padding="valid",
        data_format=None,
        dilation_rate=1,
        activation=None,
        use_bias=True,
        depthwise_initializer="glorot_uniform",
        bias_initializer="zeros",
        depthwise_regularizer=None,
        bias_regularizer=None,
        activity_regularizer=None,
        depthwise_constraint=None,
        bias_constraint=None,
        trainable=True,
        name=None,
        **kwargs,
    ):
        super().__init__(
            trainable=trainable,
            name=name,
            activity_regularizer=regularizers.get(activity_regularizer),
            **kwargs,
        )
        self.rank = rank
        self.depth_multiplier = depth_multiplier
        self.kernel_size = standardize_tuple(kernel_size, rank, "kernel_size")
        self.strides = standardize_tuple(strides, rank, "strides")
        self.dilation_rate = standardize_tuple(
            dilation_rate, rank, "dilation_rate"
        )
        self.padding = standardize_padding(padding)
        self.data_format = standardize_data_format(data_format)
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.depthwise_initializer = initializers.get(depthwise_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.depthwise_regularizer = regularizers.get(depthwise_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.depthwise_constraint = constraints.get(depthwise_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.input_spec = InputSpec(min_ndim=self.rank + 2)

        if self.depth_multiplier is not None and self.depth_multiplier <= 0:
            raise ValueError(
                "Invalid value for argument `depth_multiplier`. Expected a "
                "strictly positive value. Received "
                f"depth_multiplier={self.depth_multiplier}."
            )

        if not all(self.kernel_size):
            raise ValueError(
                "The argument `kernel_size` cannot contain 0. Received "
                f"kernel_size={self.kernel_size}."
            )

        if not all(self.strides):
            raise ValueError(
                "The argument `strides` cannot contain 0. Received "
                f"strides={self.strides}"
            )

        if max(self.strides) > 1 and max(self.dilation_rate) > 1:
            raise ValueError(
                "`strides > 1` not supported in conjunction with "
                f"`dilation_rate > 1`. Received: strides={self.strides} and "
                f"dilation_rate={self.dilation_rate}"
            )

    def build(self, input_shape):
        if self.data_format == "channels_last":
            channel_axis = -1
            input_channel = input_shape[-1]
        else:
            channel_axis = 1
            input_channel = input_shape[1]
        self.input_spec = InputSpec(
            min_ndim=self.rank + 2, axes={channel_axis: input_channel}
        )
        depthwise_shape = self.kernel_size + (
            input_channel,
            self.depth_multiplier,
        )
        self.kernel = self.add_weight(
            name="kernel",
            shape=depthwise_shape,
            initializer=self.depthwise_initializer,
            regularizer=self.depthwise_regularizer,
            constraint=self.depthwise_constraint,
            trainable=True,
            dtype=self.dtype,
        )
        if self.use_bias:
            self.bias = self.add_weight(
                name="bias",
                shape=(self.depth_multiplier * input_channel,),
                initializer=self.bias_initializer,
                regularizer=self.bias_regularizer,
                constraint=self.bias_constraint,
                trainable=True,
                dtype=self.dtype,
            )
        else:
            self.bias = None

    def _get_input_channel(self, input_shape):
        if self.data_format == "channels_last":
            input_channel = input_shape[-1]
        else:
            input_channel = input_shape[1]
        return input_channel

    def call(self, inputs):
        input_channel = self._get_input_channel(inputs.shape)
        outputs = ops.depthwise_conv(
            inputs,
            self.kernel,
            strides=self.strides,
            padding=self.padding,
            dilation_rate=self.dilation_rate,
            data_format=self.data_format,
        )
        if self.use_bias:
            if self.data_format == "channels_last":
                bias_shape = (1,) * (self.rank + 1) + (
                    self.depth_multiplier * input_channel,
                )
            else:
                bias_shape = (1, self.depth_multiplier * input_channel) + (
                    1,
                ) * self.rank
            bias = ops.reshape(self.bias, bias_shape)
            outputs = ops.add(outputs, bias)

        if self.activation is not None:
            return self.activation(outputs)
        return outputs

    def compute_output_shape(self, input_shape):
        input_channel = self._get_input_channel(input_shape)
        return compute_conv_output_shape(
            input_shape,
            self.depth_multiplier * input_channel,
            self.kernel_size,
            strides=self.strides,
            padding=self.padding,
            data_format=self.data_format,
            dilation_rate=self.dilation_rate,
        )

    def get_config(self):
        config = super().get_config()
        config.update(
            {
                "depth_multiplier": self.depth_multiplier,
                "kernel_size": self.kernel_size,
                "strides": self.strides,
                "padding": self.padding,
                "data_format": self.data_format,
                "dilation_rate": self.dilation_rate,
                "activation": activations.serialize(self.activation),
                "use_bias": self.use_bias,
                "depthwise_initializer": initializers.serialize(
                    self.depthwise_initializer
                ),
                "bias_initializer": initializers.serialize(
                    self.bias_initializer
                ),
                "depthwise_regularizer": regularizers.serialize(
                    self.depthwise_regularizer
                ),
                "bias_regularizer": regularizers.serialize(
                    self.bias_regularizer
                ),
                "activity_regularizer": regularizers.serialize(
                    self.activity_regularizer
                ),
                "depthwise_constraint": constraints.serialize(
                    self.depthwise_constraint
                ),
                "bias_constraint": constraints.serialize(self.bias_constraint),
            }
        )
        return config
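

# The concrete layers (e.g. `keras.layers.DepthwiseConv1D` and
# `DepthwiseConv2D`) subclass `BaseDepthwiseConv`, fix `rank`, and forward the
# remaining arguments. The class below is only a sketch of that pattern for
# illustration; the name `DepthwiseConv2DSketch` and its reduced argument list
# are assumptions, not part of this module.
class DepthwiseConv2DSketch(BaseDepthwiseConv):
    def __init__(
        self,
        kernel_size,
        strides=(1, 1),
        padding="valid",
        depth_multiplier=1,
        **kwargs,
    ):
        super().__init__(
            rank=2,
            depth_multiplier=depth_multiplier,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            **kwargs,
        )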