
    2Vh<                     |   d dl mZ d dl mZ d dlmZ d dlmZ d dlmZ d dl	m
Z
 d dlmZ dZd	Z ed
dg      	 	 	 	 	 	 	 	 dd       Z	 ddZ ed      dd       Z ed      dd       Zej&                  j)                  dej*                  ej,                        e_        ej$                  j.                  e_        y)    )backend)layers)keras_export)imagenet_utils)
Functional)operation_utils)
file_utilsz|https://storage.googleapis.com/tensorflow/keras-applications/inception_v3/inception_v3_weights_tf_dim_ordering_tf_kernels.h5zhttps://storage.googleapis.com/tensorflow/keras-applications/inception_v3/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5z+keras.applications.inception_v3.InceptionV3zkeras.applications.InceptionV3Nc           
         |dv s#t        j                  |      st        d|       |dk(  r| r|dk7  rt        d|       t        j                  |ddt        j                         | |      }|t        j                  |	      }n/t        j                  |      st        j                  ||
      }n|}t        j                         dk(  rd}	nd}	t        |ddddd      }
t        |
dddd      }
t        |
ddd      }
 t        j                  dd      |
      }
t        |
dddd      }
t        |
dddd      }
 t        j                  dd      |
      }
t        |
ddd      }t        |
ddd      }t        |ddd      }t        |
ddd      }t        |ddd      }t        |ddd      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d      }
t        |
ddd      }t        |
ddd      }t        |ddd      }t        |
ddd      }t        |ddd      }t        |ddd      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d      }
t        |
ddd      }t        |
ddd      }t        |ddd      }t        |
ddd      }t        |ddd      }t        |ddd      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d       }
t        |
d!dddd      }t        |
ddd      }t        |ddd      }t        |ddddd      } t        j                  dd      |
      }t        j                  |||g|	d"      }
t        |
ddd      }t        |
d#dd      }t        |d#dd$      }t        |dd$d      }t        |
d#dd      }t        |d#d$d      }t        |d#dd$      }t        |d#d$d      }t        |ddd$      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d%      }
t        d&      D ]  }t        |
ddd      }t        |
d'dd      }t        |d'dd$      }t        |dd$d      }t        |
d'dd      }t        |d'd$d      }t        |d'dd$      }t        |d'd$d      }t        |ddd$      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d(t        d|z         z         }
 t        |
ddd      }t        |
ddd      }t        |ddd$      }t        |dd$d      }t        |
ddd      }t        |dd$d      }t        |ddd$      }t        |dd$d      }t        |ddd$      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d)      }
t        |
ddd      }t        |d*dddd      }t        |
ddd      }t        |ddd$      }t        |dd$d      }t        |ddddd      } t        j                  dd      |
      }t        j                  |||g|	d+      }
t        d&      D ]	  }t        |
d*dd      }t        |
d!dd      }t        |d!dd      }t        |d!dd      }t        j                  ||g|	d,t        |      z         }t        |
d-dd      }t        |d!dd      }t        |d!dd      }t        |d!dd      }t        j                  ||g|	.      } t        j                  ddd      |
      }t        |ddd      }t        j                  ||||g|	d(t        d/|z         z         }
 | rQ t        j                   d01      |
      }
t        j"                  ||        t        j$                  ||d23      |
      }
n?|d4k(  r t        j                          |
      }
n|d5k(  r t        j&                         |
      }
|t)        j*                  |      }n|}t-        ||
|1      }|dk(  rP| rt        j.                  d6t0        d7d89      }nt        j.                  d:t2        d7d;9      }|j5                  |       |S ||j5                  |       |S )<a  Instantiates the Inception v3 architecture.

    Reference:
    - [Rethinking the Inception Architecture for Computer Vision](
        http://arxiv.org/abs/1512.00567) (CVPR 2016)

    This function returns a Keras image classification model,
    optionally loaded with weights pre-trained on ImageNet.

    For image classification use cases, see
    [this page for detailed examples](
      https://keras.io/api/applications/#usage-examples-for-image-classification-models).

    For transfer learning use cases, make sure to read the
    [guide to transfer learning & fine-tuning](
      https://keras.io/guides/transfer_learning/).

    Note: each Keras Application expects a specific kind of input preprocessing.
    For `InceptionV3`, call
    `keras.applications.inception_v3.preprocess_input` on your inputs
    before passing them to the model.
    `inception_v3.preprocess_input` will scale input pixels between -1 and 1.

    Args:
        include_top: Boolean, whether to include the fully-connected
            layer at the top, as the last layer of the network.
            Defaults to `True`.
        weights: One of `None` (random initialization),
            `imagenet` (pre-training on ImageNet),
            or the path to the weights file to be loaded.
            Defaults to `"imagenet"`.
        input_tensor: Optional Keras tensor (i.e. output of `layers.Input()`)
            to use as image input for the model. `input_tensor` is useful for
            sharing inputs between multiple different networks.
            Defaults to `None`.
        input_shape: Optional shape tuple, only to be specified
            if `include_top` is False (otherwise the input shape
            has to be `(299, 299, 3)` (with `channels_last` data format)
            or `(3, 299, 299)` (with `channels_first` data format).
            It should have exactly 3 inputs channels,
            and width and height should be no smaller than 75.
            E.g. `(150, 150, 3)` would be one valid value.
            `input_shape` will be ignored if the `input_tensor` is provided.
        pooling: Optional pooling mode for feature extraction
            when `include_top` is `False`.
            - `None` (default) means that the output of the model will be
                the 4D tensor output of the last convolutional block.
            - `avg` means that global average pooling
                will be applied to the output of the
                last convolutional block, and thus
                the output of the model will be a 2D tensor.
            - `max` means that global max pooling will be applied.
        classes: optional number of classes to classify images
            into, only to be specified if `include_top` is `True`, and
            if no `weights` argument is specified. Defaults to 1000.
        classifier_activation: A `str` or callable. The activation function
            to use on the "top" layer. Ignored unless `include_top=True`.
            Set `classifier_activation=None` to return the logits of the "top"
            layer. When loading pretrained weights, `classifier_activation`
            can only be `None` or `"softmax"`.
        name: The name of the model (string).

    Returns:
        A model instance.
    >   NimagenetzThe `weights` argument should be either `None` (random initialization), `imagenet` (pre-training on ImageNet), or the path to the weights file to be loaded; Received: weights=r     zbIf using `weights="imagenet"` with `include_top=True`, `classes` should be 1000. Received classes=i+  K   )default_sizemin_sizedata_formatrequire_flattenweights)shape)tensorr   channels_first          )   r   valid)stridespadding)r   @   )r   r   )r   P      0      `   r   r   samemixed0)axisnamemixed1mixed2i  mixed3      mixed4r      mixedmixed7i@  mixed8mixed9_i  )r&   	   avg_poolr'   predictions)
activationr'   avgmaxz2inception_v3_weights_tf_dim_ordering_tf_kernels.h5models 9a0d58056eeedaa3f26cb7ebd46da564)cache_subdir	file_hashz8inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5 bcbd6486424b2319ff4ef7d526e38f63)r	   exists
ValueErrorr   obtain_input_shaper   image_data_formatr   Inputis_keras_tensor	conv2d_bnMaxPooling2DAveragePooling2DconcatenaterangestrGlobalAveragePooling2Dvalidate_activationDenseGlobalMaxPooling2Dr   get_source_inputsr   get_fileWEIGHTS_PATHWEIGHTS_PATH_NO_TOPload_weights)include_topr   input_tensorinput_shapepoolingclassesclassifier_activationr'   	img_inputchannel_axisx	branch1x1	branch5x5branch3x3dblbranch_pool	branch3x3	branch7x7branch7x7dblibranch7x7x3branch3x3_1branch3x3_2branch3x3dbl_1branch3x3dbl_2inputsmodelweights_paths                              S/home/dcms/DCMS/lib/python3.12/site-packages/keras/src/applications/inception_v3.pyInceptionV3rn      s
   b ))Z->->w-G! ")		+
 	
 *D  'y*
 	
 !33--/#K LL{3	&&|4LLI$I  "&66)RAvwGA!RAw/A!RAA3FF3A6A!RAw/A!S!Q0A3FF3A6A !RA&I!RA&I)RA.IQAq)L\2q!4L\2q!4L&))	K KQ2K	I|[9	A !RA&I!RA&I)RA.IQAq)L\2q!4L\2q!4L&))	K KQ2K	I|[9	A !RA&I!RA&I)RA.IQAq)L\2q!4L\2q!4L&))	K KQ2K	I|[9	A !S!QHIQAq)L\2q!4Lb!QL >&%%ff=a@K	L+.\	A
 !S!Q'I!S!Q'I)S!Q/I)S!Q/IQQ*L\315L\315L\315L\315L&))	K Ka3K	I|[9	A 1X 
aa+	aa+	ia3	ia3	 CA. sAq9 sAq9 sAq9 sAq9
f--FF

  S!Q7	<=3q1u:%
#
0 !S!Q'I!S!Q'I)S!Q/I)S!Q/IQQ*L\315L\315L\315L\315L&))	K Ka3K	I|[9	A !S!Q'I)S!QPIAsAq)KKa3KKa3KS!QK >&%%ff=a@K	K-Lx	A
 1X 
aa+	aa+		315	315&&+&SV#
	 !CA. sAq9"<a;"<a;))^,<

f--FF

  S!Q7	<=3q1u:%
1
: :F))z:1=**+@'J
FLL 5M

 e/--/2A+))+A.A  22<@vqt,E *%..D%<	L &..J#%<	L 	<( L 
	7#L    c           	         ||dz   }|dz   }nd}d}t        j                         dk(  rd}	nd}	 t        j                  |||f||d|      |       }  t        j                  |	d|	      |       }  t        j
                  d
|      |       } | S )a  Utility function to apply conv + BN.

    Args:
        x: input tensor.
        filters: filters in `Conv2D`.
        num_row: height of the convolution kernel.
        num_col: width of the convolution kernel.
        padding: padding mode in `Conv2D`.
        strides: strides in `Conv2D`.
        name: name of the ops; will become `name + '_conv'`
            for the convolution and `name + '_bn'` for the
            batch norm layer.

    Returns:
        Output tensor after applying `Conv2D` and `BatchNormalization`.
    N_bn_convr   r   r   F)r   r   use_biasr'   )r&   scaler'   relur5   )r   rB   r   Conv2DBatchNormalization
Activation)
r\   filtersnum_rownum_colr   r   r'   bn_name	conv_namebn_axiss
             rm   rE   rE     s    & ,7N		  "&66		'	 		A 	K!!we'J1MA,&t,Q/AHro   z0keras.applications.inception_v3.preprocess_inputc                 2    t        j                  | |d      S )Ntf)r   mode)r   preprocess_input)r\   r   s     rm   r   r     s    **	{ ro   z2keras.applications.inception_v3.decode_predictionsc                 0    t        j                  | |      S )N)top)r   decode_predictions)predsr   s     rm   r   r     s    ,,U<<ro    )r   reterror)Tr   NNNr   softmaxinception_v3)r$   r#   N)N)r!   )	keras.srcr   r   keras.src.api_exportr   keras.src.applicationsr   keras.src.modelsr   keras.src.opsr   keras.src.utilsr	   rQ   rR   rn   rE   r   r   PREPROCESS_INPUT_DOCformatPREPROCESS_INPUT_RET_DOC_TFPREPROCESS_INPUT_ERROR_DOC__doc__ ro   rm   <module>r      s     - 1 ' ) &F 
L  5( #	ccN HL'T @A B BC= D= *>>EE	22

3
3 F   
 ,>>FF  ro   