
from keras.src.api_export import keras_export
from keras.src.callbacks.callback import Callback


@keras_export("keras.callbacks.LambdaCallback")
class LambdaCallback(Callback):
    """Callback for creating simple, custom callbacks on-the-fly.

    This callback is constructed with anonymous functions that will be called
    at the appropriate time (during `Model.{fit | evaluate | predict}`).
    Note that the callbacks expect positional arguments, as listed below
    (a short signature sketch follows the list):

    - `on_epoch_begin` and `on_epoch_end` expect two positional arguments:
      `epoch`, `logs`
    - `on_train_begin` and `on_train_end` expect one positional argument:
      `logs`
    - `on_train_batch_begin` and `on_train_batch_end` expect two positional
      arguments: `batch`, `logs`
    - See `Callback` class definition for the full list of functions and their
      expected arguments.
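
    For instance, callables with these signatures could be wired up as in
    the short sketch below (a minimal illustration; the helper names are
    purely hypothetical):

    ```python
    # Hypothetical helpers matching the expected positional signatures.
    def log_epoch(epoch, logs):
        print(f"epoch {epoch}: loss={logs.get('loss')}")

    def announce_start(logs):
        print("training started")

    signature_demo_callback = LambdaCallback(
        on_epoch_end=log_epoch,
        on_train_begin=announce_start,
    )
    ```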

    Args:
        on_epoch_begin: called at the beginning of every epoch.
        on_epoch_end: called at the end of every epoch.
        on_train_begin: called at the beginning of model training.
        on_train_end: called at the end of model training.
        on_train_batch_begin: called at the beginning of every train batch.
        on_train_batch_end: called at the end of every train batch.
        kwargs: Any function in `Callback` that you want to override by
            passing `function_name=function`. For example,
            `LambdaCallback(.., on_train_end=train_end_fn)`. The custom function
            needs to have the same arguments as the ones defined in
            `Callback` (see the sketch below).
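
    For instance, any other `Callback` hook can be overridden the same way
    (a minimal sketch; `on_predict_end` is one of the standard hooks):

    ```python
    # Override an additional hook purely via keyword argument.
    predict_done_callback = LambdaCallback(
        on_predict_end=lambda logs: print("prediction finished"))
    ```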

    Example:

    ```python
    # Print the batch number at the beginning of every batch.
    batch_print_callback = LambdaCallback(
        on_train_batch_begin=lambda batch, logs: print(batch))

    # Stream the epoch loss to a file in JSON format. The file content
    # is not well-formed JSON but rather has a JSON object per line.
    import json
    json_log = open('loss_log.json', mode='wt', buffering=1)
    json_logging_callback = LambdaCallback(
        on_epoch_end=lambda epoch, logs: json_log.write(
            json.dumps({'epoch': epoch, 'loss': logs['loss']}) + '\n'),
        on_train_end=lambda logs: json_log.close()
    )

    # Terminate some processes after having finished model training.
    processes = ...
    cleanup_callback = LambdaCallback(
        on_train_end=lambda logs: [
            p.terminate() for p in processes if p.is_alive()])

    model.fit(...,
              callbacks=[batch_print_callback,
                         json_logging_callback,
                         cleanup_callback])
    ```
    """

    def __init__(
        self,
        on_epoch_begin=None,
        on_epoch_end=None,
        on_train_begin=None,
        on_train_end=None,
        on_train_batch_begin=None,
        on_train_batch_end=None,
        **kwargs,
    ):
        super().__init__()
        # Allow overriding any other `Callback` hook via keyword arguments.
        self.__dict__.update(kwargs)
        # Bind only the hooks that were actually provided, leaving the
        # default `Callback` no-op implementations in place otherwise.
        if on_epoch_begin is not None:
            self.on_epoch_begin = on_epoch_begin
        if on_epoch_end is not None:
            self.on_epoch_end = on_epoch_end
        if on_train_begin is not None:
            self.on_train_begin = on_train_begin
        if on_train_end is not None:
            self.on_train_end = on_train_end
        if on_train_batch_begin is not None:
            self.on_train_batch_begin = on_train_batch_begin
        if on_train_batch_end is not None:
            self.on_train_batch_end = on_train_batch_end