
"""Inputs and Readers.

See the [Inputs and
Readers](https://tensorflow.org/api_guides/python/io_ops) guide.
"""

from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor as tensor_lib
from tensorflow.python.lib.io import python_io
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import gen_io_ops
from tensorflow.python.ops import gen_parsing_ops
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_io_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export


def _save(filename, tensor_names, tensors, tensor_slices=None, name="save"):
  """Save a list of tensors to a file with given names.

  Example usage without slice info:
    Save("/foo/bar", ["w", "b"], [w, b])

  Example usage with slices:
    Save("/foo/bar", ["w", "w"], [slice0, slice1],
         tensor_slices=["4 10 0,2:-", "4 10 2,2:-"])

  Args:
    filename: the file name of the sstable.
    tensor_names: a list of strings.
    tensors: the list of tensors to be saved.
    tensor_slices: Optional list of strings to specify the shape and slices of
      a larger virtual tensor that each tensor is a part of.  If not specified
      each tensor is saved as a full slice.
    name: string.  Optional name for the op.

  Requires:
    The length of tensors should match the size of tensor_names and of
    tensor_slices.

  Returns:
    An Operation that saves the tensors.
  """
  if tensor_slices is None:
    return gen_io_ops.save(filename, tensor_names, tensors, name=name)
  else:
    return gen_io_ops.save_slices(filename, tensor_names, tensor_slices,
                                  tensors, name=name)


def _restore_slice(file_pattern, tensor_name, shape_and_slice, tensor_type,
                   name="restore_slice", preferred_shard=-1):
  """Restore a tensor slice from a set of files with a given pattern.

  Example usage:
    RestoreSlice("/foo/bar-?????-of-?????", "w", "10 10 0,2:-", DT_FLOAT)

  Args:
    file_pattern: the file pattern used to match a set of checkpoint files.
    tensor_name: the name of the tensor to restore.
    shape_and_slice: the shape-and-slice spec of the slice.
    tensor_type: the type of the tensor to restore.
    name: string.  Optional name for the op.
    preferred_shard: Int. Optional shard to open first in the checkpoint file.

  Returns:
    A tensor of type "tensor_type".
  """
  base_type = dtypes.as_dtype(tensor_type).base_dtype
  return gen_io_ops.restore_slice(
      file_pattern, tensor_name, shape_and_slice, base_type,
      preferred_shard, name=name)


@tf_export("io.read_file", v1=["io.read_file", "read_file"])
def read_file(filename, name=None):
  """Reads the contents of file.

  This operation returns a tensor with the entire contents of the input
  filename. It does not do any parsing; it just returns the contents as
  they are. Usually, this is the first step in the input pipeline.

  Example:

  >>> with open("/tmp/file.txt", "w") as f:
  ...   f.write("asdf")
  ...
  4
  >>> tf.io.read_file("/tmp/file.txt")
  <tf.Tensor: shape=(), dtype=string, numpy=b'asdf'>
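
  The returned value is a scalar string tensor holding the raw bytes, so,
  for example, its byte length can be checked with `tf.strings.length`
  (illustrative; continues the doctest above):

  >>> tf.strings.length(tf.io.read_file("/tmp/file.txt"))
  <tf.Tensor: shape=(), dtype=int32, numpy=4>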

  Example of using the op in a function to read an image, decode it and reshape
  the tensor containing the pixel data:

  >>> @tf.function
  ... def load_image(filename):
  ...   raw = tf.io.read_file(filename)
  ...   image = tf.image.decode_png(raw, channels=3)
  ...   # the `print` executes during tracing.
  ...   print("Initial shape: ", image.shape)
  ...   image.set_shape([28, 28, 3])
  ...   print("Final shape: ", image.shape)
  ...   return image

  Args:
    filename: string. filename to read from.
    name: string.  Optional name for the op.

  Returns:
    A tensor of dtype "string", with the file contents.
  """
  return gen_io_ops.read_file(filename, name)


@tf_export(
    "io.serialize_tensor", v1=["io.serialize_tensor", "serialize_tensor"])
def serialize_tensor(tensor, name=None):
  r"""Transforms a Tensor into a serialized TensorProto proto.

  This operation transforms data in a `tf.Tensor` into a `tf.Tensor` of type
  `tf.string` containing the data in a binary string in little-endian format.
  This operation can transform scalar data and linear arrays, but it is most
  useful in converting multidimensional arrays into a format accepted by binary
  storage formats such as a `TFRecord` or `tf.train.Example`.

  See also:
  - `tf.io.parse_tensor`: inverse operation of `tf.io.serialize_tensor` that
  transforms a scalar string containing a serialized Tensor in little-endian
  format into a Tensor of a specified type.
  - `tf.ensure_shape`: `parse_tensor` cannot statically determine the shape of
  the parsed tensor. Use `tf.ensure_shape` to set the static shape when running
  under a `tf.function`.
  - `.SerializeToString`: serializes a proto to a binary-string.

  Example of serializing scalar data:

  >>> t = tf.constant(1)
  >>> tf.io.serialize_tensor(t)
  <tf.Tensor: shape=(), dtype=string, numpy=b'\x08...\x00'>

  Example of storing non-scalar data into a `tf.train.Example`:

  >>> t1 = [[1, 2]]
  >>> t2 = [[7, 8]]
  >>> nonscalar = tf.concat([t1, t2], 0)
  >>> nonscalar
  <tf.Tensor: shape=(2, 2), dtype=int32, numpy=
  array([[1, 2],
         [7, 8]], dtype=int32)>

  Serialize the data using `tf.io.serialize_tensor`.

  >>> serialized_nonscalar = tf.io.serialize_tensor(nonscalar)
  >>> serialized_nonscalar
  <tf.Tensor: shape=(), dtype=string, numpy=b'\x08...\x00'>
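
  The serialized string can be turned back into a tensor with
  `tf.io.parse_tensor`, provided `out_type` matches the original dtype
  (illustrative round trip):

  >>> tf.io.parse_tensor(serialized_nonscalar, out_type=tf.int32)
  <tf.Tensor: shape=(2, 2), dtype=int32, numpy=
  array([[1, 2],
         [7, 8]], dtype=int32)>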

  Store the data in a `tf.train.Feature`.

  >>> feature_of_bytes = tf.train.Feature(
  ...   bytes_list=tf.train.BytesList(value=[serialized_nonscalar.numpy()]))
  >>> feature_of_bytes
  bytes_list {
    value: "\010...\000"
  }

  Put the `tf.train.Feature` message into a `tf.train.Example`.

  >>> features_for_example = {
  ...   'feature0': feature_of_bytes
  ... }
  >>> example_proto = tf.train.Example(
  ...   features=tf.train.Features(feature=features_for_example))
  >>> example_proto
  features {
    feature {
      key: "feature0"
      value {
        bytes_list {
          value: "\010...\000"
        }
      }
    }
  }

  Args:
    tensor: A `tf.Tensor`.
    name: string.  Optional name for the op.

  Returns:
    A Tensor of dtype string.
  """
  return gen_parsing_ops.serialize_tensor(tensor, name)


@tf_export(v1=["ReaderBase"])
class ReaderBase:
  """Base class for different Reader types, that produce a record every step.

  Conceptually, Readers convert string 'work units' into records (key,
  value pairs).  Typically the 'work units' are filenames and the
  records are extracted from the contents of those files.  We want a
  single record produced per step, but a work unit can correspond to
  many records.

  Therefore we introduce some decoupling using a queue.  The queue
  contains the work units and the Reader dequeues from the queue when
  it is asked to produce a record (via Read()) but it has finished the
  last work unit.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  def __init__(self, reader_ref, supports_serialize=False):
    """Creates a new ReaderBase.

    Args:
      reader_ref: The operation that implements the reader.
      supports_serialize: True if the reader implementation can
        serialize its state.

    Raises:
      RuntimeError: If eager execution is enabled.
    """
    if context.executing_eagerly():
      raise RuntimeError(
          "Readers are not supported when eager execution is enabled. "
          "Instead, please use tf.data to get data into your model.")

    self._reader_ref = reader_ref
    self._supports_serialize = supports_serialize

  @property
  def reader_ref(self):
    """Op that implements the reader."""
    return self._reader_ref

  def read(self, queue, name=None):
    """Returns the next record (key, value) pair produced by a reader.

    Will dequeue a work unit from queue if necessary (e.g. when the
    Reader needs to start reading from a new file since it has
    finished with the previous file).

    Args:
      queue: A Queue or a mutable string Tensor representing a handle
        to a Queue, with string work items.
      name: A name for the operation (optional).

    Returns:
      A tuple of Tensors (key, value).
      key: A string scalar Tensor.
      value: A string scalar Tensor.
    """
    if isinstance(queue, tensor_lib.Tensor):
      queue_ref = queue
    else:
      queue_ref = queue.queue_ref
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_read_v2(self._reader_ref, queue_ref, name=name)
    else:
      # For compatibility with pre-resource queues, create a reference to the
      # queue that the reader ops can consume.
      old_queue_op = gen_data_flow_ops.fake_queue(queue_ref)
      return gen_io_ops.reader_read(self._reader_ref, old_queue_op, name=name)

  def read_up_to(self, queue, num_records, name=None):
    """Returns up to num_records (key, value) pairs produced by a reader.

    Will dequeue a work unit from queue if necessary (e.g., when the
    Reader needs to start reading from a new file since it has
    finished with the previous file).
    It may return less than num_records even before the last batch.

    Args:
      queue: A Queue or a mutable string Tensor representing a handle
        to a Queue, with string work items.
      num_records: Number of records to read.
      name: A name for the operation (optional).

    Returns:
      A tuple of Tensors (keys, values).
      keys: A 1-D string Tensor.
      values: A 1-D string Tensor.
    """
    if isinstance(queue, tensor_lib.Tensor):
      queue_ref = queue
    else:
      queue_ref = queue.queue_ref
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_read_up_to_v2(self._reader_ref,
                                             queue_ref,
                                             num_records,
                                             name=name)
    else:
      # For compatibility with pre-resource queues, create a reference to the
      # queue that the reader ops can consume.
      old_queue_op = gen_data_flow_ops.fake_queue(queue_ref)
      return gen_io_ops.reader_read_up_to(self._reader_ref,
                                          old_queue_op,
                                          num_records,
                                          name=name)

  def num_records_produced(self, name=None):
    """Returns the number of records this reader has produced.

    This is the same as the number of Read executions that have
    succeeded.

    Args:
      name: A name for the operation (optional).

    Returns:
      An int64 Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_num_records_produced_v2(self._reader_ref,
                                                       name=name)
    else:
      return gen_io_ops.reader_num_records_produced(self._reader_ref,
                                                    name=name)

  def num_work_units_completed(self, name=None):
    """Returns the number of work units this reader has finished processing.

    Args:
      name: A name for the operation (optional).

    Returns:
      An int64 Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_num_work_units_completed_v2(self._reader_ref,
                                                           name=name)
    else:
      return gen_io_ops.reader_num_work_units_completed(self._reader_ref,
                                                        name=name)

  def serialize_state(self, name=None):
    """Produce a string tensor that encodes the state of a reader.

    Not all Readers support being serialized, so this can produce an
    Unimplemented error.

    Args:
      name: A name for the operation (optional).

    Returns:
      A string Tensor.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_serialize_state_v2(self._reader_ref, name=name)
    else:
      return gen_io_ops.reader_serialize_state(self._reader_ref, name=name)

  def restore_state(self, state, name=None):
    """Restore a reader to a previously saved state.

    Not all Readers support being restored, so this can produce an
    Unimplemented error.

    Args:
      state: A string Tensor.
        Result of a SerializeState of a Reader with matching type.
      name: A name for the operation (optional).

    Returns:
      The created Operation.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_restore_state_v2(
          self._reader_ref, state, name=name)
    else:
      return gen_io_ops.reader_restore_state(
          self._reader_ref, state, name=name)

  @property
  def supports_serialize(self):
    """Whether the Reader implementation can serialize its state."""
    return self._supports_serialize

  def reset(self, name=None):
    """Restore a reader to its initial clean state.

    Args:
      name: A name for the operation (optional).

    Returns:
      The created Operation.
    """
    if self._reader_ref.dtype == dtypes.resource:
      return gen_io_ops.reader_reset_v2(self._reader_ref, name=name)
    else:
      return gen_io_ops.reader_reset(self._reader_ref, name=name)


ops.NotDifferentiable("ReaderRead")
ops.NotDifferentiable("ReaderReadUpTo")
ops.NotDifferentiable("ReaderNumRecordsProduced")
ops.NotDifferentiable("ReaderNumWorkUnitsCompleted")
ops.NotDifferentiable("ReaderSerializeState")
ops.NotDifferentiable("ReaderRestoreState")
ops.NotDifferentiable("ReaderReset")


@tf_export(v1=["WholeFileReader"])
class WholeFileReader(ReaderBase):
  """A Reader that outputs the entire contents of a file as a value.

  To use, enqueue filenames in a Queue.  The output of Read will
  be a filename (key) and the contents of that file (value).

  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.data.Dataset.map(tf.read_file)`.")
  def __init__(self, name=None):
    """Create a WholeFileReader.

    Args:
      name: A name for the operation (optional).
    """
    rr = gen_io_ops.whole_file_reader_v2(name=name)
    super(WholeFileReader, self).__init__(rr, supports_serialize=True)


ops.NotDifferentiable("WholeFileReader")


@tf_export(v1=["TextLineReader"])
class TextLineReader(ReaderBase):
  """A Reader that outputs the lines of a file delimited by newlines.

  Newlines are stripped from the output.
  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.data.TextLineDataset`.")
  def __init__(self, skip_header_lines=None, name=None):
    """Create a TextLineReader.

    Args:
      skip_header_lines: An optional int. Defaults to 0.  Number of lines
        to skip from the beginning of every file.
      name: A name for the operation (optional).
    """
    rr = gen_io_ops.text_line_reader_v2(skip_header_lines=skip_header_lines,
                                        name=name)
    super(TextLineReader, self).__init__(rr)


ops.NotDifferentiable("TextLineReader")


@tf_export(v1=["FixedLengthRecordReader"])
class FixedLengthRecordReader(ReaderBase):
  """A Reader that outputs fixed-length records from a file.

  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.data.FixedLengthRecordDataset`.")
  def __init__(self,
               record_bytes,
               header_bytes=None,
               footer_bytes=None,
               hop_bytes=None,
               name=None,
               encoding=None):
    """Create a FixedLengthRecordReader.

    Args:
      record_bytes: An int.
      header_bytes: An optional int. Defaults to 0.
      footer_bytes: An optional int. Defaults to 0.
      hop_bytes: An optional int. Defaults to 0.
      name: A name for the operation (optional).
      encoding: The type of encoding for the file. Defaults to none.
    """
    rr = gen_io_ops.fixed_length_record_reader_v2(
        record_bytes=record_bytes,
        header_bytes=header_bytes,
        footer_bytes=footer_bytes,
        hop_bytes=hop_bytes,
        encoding=encoding,
        name=name)
    super(FixedLengthRecordReader, self).__init__(rr)


ops.NotDifferentiable("FixedLengthRecordReader")


@tf_export(v1=["TFRecordReader"])
class TFRecordReader(ReaderBase):
  """A Reader that outputs the records from a TFRecords file.

  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.data.TFRecordDataset`.")
  def __init__(self, name=None, options=None):
    """Create a TFRecordReader.

    Args:
      name: A name for the operation (optional).
      options: A TFRecordOptions object (optional).
    """
    compression_type = python_io.TFRecordOptions.get_compression_type_string(
        options)
    rr = gen_io_ops.tf_record_reader_v2(name=name,
                                        compression_type=compression_type)
    super(TFRecordReader, self).__init__(rr)


ops.NotDifferentiable("TFRecordReader")


@tf_export(v1=["LMDBReader"])
class LMDBReader(ReaderBase):
  """A Reader that outputs the records from a LMDB file.

  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.contrib.data.LMDBDataset`.")
  def __init__(self, name=None, options=None):
    """Create a LMDBReader.

    Args:
      name: A name for the operation (optional).
      options: A LMDBRecordOptions object (optional).
    """
    del options
    rr = gen_io_ops.lmdb_reader(name=name)
    super(LMDBReader, self).__init__(rr)


ops.NotDifferentiable("LMDBReader")


@tf_export(v1=["IdentityReader"])
class IdentityReader(ReaderBase):
  """A Reader that outputs the queued work as both the key and value.

  To use, enqueue strings in a Queue.  Read will take the front
  work string and output (work, work).

  See ReaderBase for supported methods.

  @compatibility(eager)
  Readers are not compatible with eager execution. Instead, please
  use `tf.data` to get data into your model.
  @end_compatibility
  """

  @deprecation.deprecated(
      None, "Queue-based input pipelines have been replaced by `tf.data`. Use "
      "`tf.data.Dataset.map(...)`.")
  def __init__(self, name=None):
    """Create an IdentityReader.

    Args:
      name: A name for the operation (optional).
    """
    rr = gen_io_ops.identity_reader_v2(name=name)
    super(IdentityReader, self).__init__(rr, supports_serialize=True)


ops.NotDifferentiable("IdentityReader")