"""Input dataset creator for `model.fit`."""

from tensorflow.python.distribute import distribute_lib
from tensorflow.python.types import data as data_types


class DatasetCreator(object):
  """Object that returns a `tf.data.Dataset` upon invoking.

  `tf.keras.utils.experimental.DatasetCreator` is designated as a supported type
  for `x`, or the input, in `tf.keras.Model.fit`. Pass an instance of this class
  to `fit` when using a callable (with an `input_context` argument) that returns
  a `tf.data.Dataset`.

  ```python
  model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
  model.compile(tf.keras.optimizers.SGD(), loss="mse")

  def dataset_fn(input_context):
    global_batch_size = 64
    batch_size = input_context.get_per_replica_batch_size(global_batch_size)
    dataset = tf.data.Dataset.from_tensors(([1.], [1.])).repeat()
    dataset = dataset.shard(
        input_context.num_input_pipelines, input_context.input_pipeline_id)
    dataset = dataset.batch(batch_size)
    dataset = dataset.prefetch(2)
    return dataset

  input_options = tf.distribute.InputOptions(
      experimental_fetch_to_device=True,
      experimental_per_replica_buffer_size=2)
  model.fit(tf.keras.utils.experimental.DatasetCreator(
      dataset_fn, input_options=input_options), epochs=10, steps_per_epoch=10)
  ```
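
  For illustration, here is a minimal sketch of what happens when a
  `DatasetCreator` is invoked directly (this assumes the `dataset_fn` defined
  above and a default `tf.distribute.InputContext`; under distributed training
  the strategy supplies the actual `InputContext`):

  ```python
  creator = tf.keras.utils.experimental.DatasetCreator(dataset_fn)
  # Calling the instance forwards its arguments to `dataset_fn` and returns
  # the resulting `tf.data.Dataset`.
  dataset = creator(tf.distribute.InputContext())
  ```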

  `Model.fit` usage with `DatasetCreator` is intended to work across all
  `tf.distribute.Strategy`s, as long as `Strategy.scope` is used at model
  creation:

  ```python
  strategy = tf.distribute.experimental.ParameterServerStrategy(
      cluster_resolver)
  with strategy.scope():
    model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
  model.compile(tf.keras.optimizers.SGD(), loss="mse")
  ...
  ```
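
  As a sketch of the same pattern under a different strategy (this assumes
  `tf.distribute.MultiWorkerMirroredStrategy` is available in your TF version
  and reuses the `dataset_fn` defined above):

  ```python
  strategy = tf.distribute.MultiWorkerMirroredStrategy()
  with strategy.scope():
    model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
  model.compile(tf.keras.optimizers.SGD(), loss="mse")
  model.fit(tf.keras.utils.experimental.DatasetCreator(dataset_fn),
            epochs=10, steps_per_epoch=10)
  ```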

  Note: When using `DatasetCreator`, the `steps_per_epoch` argument in
  `Model.fit` must be provided, as the cardinality of such an input cannot be
  inferred.

  Args:
    dataset_fn: A callable that takes a single argument of type
      `tf.distribute.InputContext`, which is used for batch size calculation and
      cross-worker input pipeline sharding (if neither is needed, the
      `InputContext` parameter can be ignored in the `dataset_fn`), and returns
      a `tf.data.Dataset`.
    input_options: Optional `tf.distribute.InputOptions`, used to specify
      options when the input is distributed, for example, whether to prefetch
      dataset elements to accelerator device memory or host device memory, and
      the prefetch buffer size in the replica device memory. Has no effect if
      not used with distributed training. See `tf.distribute.InputOptions` for
      more information.
  """

  def __init__(self, dataset_fn, input_options=None):
    if not callable(dataset_fn):
      raise TypeError('`dataset_fn` for `DatasetCreator` must be a '
                      '`callable`.')
    if input_options and (not isinstance(input_options,
                                         distribute_lib.InputOptions)):
      raise TypeError('`input_options` for `DatasetCreator` must be a '
                      '`tf.distribute.InputOptions`.')

    self.dataset_fn = dataset_fn
    self.input_options = input_options

  def __call__(self, *args, **kwargs):
    # Invoking a `DatasetCreator` forwards the arguments to `dataset_fn` and
    # requires that it returns a `tf.data.Dataset`.
    dataset = self.dataset_fn(*args, **kwargs)
    if not isinstance(dataset, data_types.DatasetV2):
      raise TypeError('The `callable` provided to `DatasetCreator` must return '
                      'a Dataset.')
    return dataset