
"""Utilities for file download and caching."""

from abc import abstractmethod
from contextlib import closing
import functools
import hashlib
import multiprocessing
import multiprocessing.dummy
import os
import queue
import random
import shutil
import sys
import tarfile
import threading
import time
import typing
import urllib
import weakref
import zipfile

import numpy as np

from six.moves.urllib.request import urlopen

from tensorflow.python.framework import tensor
from tensorflow.python.keras.utils import tf_inspect
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.io_utils import path_to_string

if sys.version_info[0] == 2:

  def urlretrieve(url, filename, reporthook=None, data=None):
    """Replacement for `urlretrieve` for Python 2.

    Under Python 2, `urlretrieve` relies on `FancyURLopener` from legacy
    `urllib` module, known to have issues with proxy management.

    Args:
        url: url to retrieve.
        filename: where to store the retrieved data locally.
        reporthook: a hook function that will be called once on establishment of
          the network connection and once after each block read thereafter. The
          hook will be passed three arguments; a count of blocks transferred so
          far, a block size in bytes, and the total size of the file.
        data: `data` argument passed to `urlopen`.
    """

    def chunk_read(response, chunk_size=8192, reporthook=None):
      # Stream the response in fixed-size chunks, reporting progress if asked.
      content_type = response.info().get('Content-Length')
      total_size = -1
      if content_type is not None:
        total_size = int(content_type.strip())
      count = 0
      while True:
        chunk = response.read(chunk_size)
        count += 1
        if reporthook is not None:
          reporthook(count, chunk_size, total_size)
        if chunk:
          yield chunk
        else:
          break

    response = urlopen(url, data)
    with open(filename, 'wb') as fd:
      for chunk in chunk_read(response, reporthook=reporthook):
        fd.write(chunk)
else:
  from urllib.request import urlretrieve  # pylint: disable=g-importing-member


def is_generator_or_sequence(x):
  """Check if `x` is a Keras generator type."""
  builtin_iterators = (str, list, tuple, dict, set, frozenset)
  if isinstance(x, (tensor.Tensor, np.ndarray) + builtin_iterators):
    return False
  return (tf_inspect.isgenerator(x) or
          isinstance(x, Sequence) or
          isinstance(x, typing.Iterator))


def _extract_archive(file_path, path='.', archive_format='auto'):
  """Extracts an archive if it matches tar, tar.gz, tar.bz, or zip formats.

  Args:
      file_path: path to the archive file
      path: path to extract the archive file
      archive_format: Archive format to try for extracting the file.
          Options are 'auto', 'tar', 'zip', and None.
          'tar' includes tar, tar.gz, and tar.bz files.
          The default 'auto' is ['tar', 'zip'].
          None or an empty list will return no matches found.

  Returns:
      True if a match was found and an archive extraction was completed,
      False otherwise.
  """
  if archive_format is None:
    return False
  if archive_format == 'auto':
    archive_format = ['tar', 'zip']
  if isinstance(archive_format, str):
    archive_format = [archive_format]

  file_path = path_to_string(file_path)
  path = path_to_string(path)

  for archive_type in archive_format:
    if archive_type == 'tar':
      open_fn = tarfile.open
      is_match_fn = tarfile.is_tarfile
    if archive_type == 'zip':
      open_fn = zipfile.ZipFile
      is_match_fn = zipfile.is_zipfile

    if is_match_fn(file_path):
      with open_fn(file_path) as archive:
        try:
          archive.extractall(path)
        except (tarfile.TarError, RuntimeError, KeyboardInterrupt):
          # Leave no partially extracted files behind if extraction fails.
          if os.path.exists(path):
            if os.path.isfile(path):
              os.remove(path)
            else:
              shutil.rmtree(path)
          raise
      return True
  return False


def get_file(fname,
             origin,
             untar=False,
             md5_hash=None,
             file_hash=None,
             cache_subdir='datasets',
             hash_algorithm='auto',
             extract=False,
             archive_format='auto',
             cache_dir=None):
  """Downloads a file from a URL if it not already in the cache.

  By default the file at the url `origin` is downloaded to the
  cache_dir `~/.keras`, placed in the cache_subdir `datasets`,
  and given the filename `fname`. The final location of a file
  `example.txt` would therefore be `~/.keras/datasets/example.txt`.

  Files in tar, tar.gz, tar.bz, and zip formats can also be extracted.
  Passing a hash will verify the file after download. The command line
  programs `shasum` and `sha256sum` can compute the hash.

  Example:

  ```python
  path_to_downloaded_file = tf.keras.utils.get_file(
      "flower_photos",
      "https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz",
      untar=True)
  ```
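
  A second, hypothetical sketch combining hash verification with extraction;
  the URL and the sha256 value below are placeholders rather than a real
  dataset:

  ```python
  path_to_archive = tf.keras.utils.get_file(
      "example.tar.gz",
      "https://example.com/datasets/example.tar.gz",  # placeholder URL
      file_hash="0" * 64,  # placeholder sha256 digest
      extract=True)
  ```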

  Args:
      fname: Name of the file. If an absolute path `/path/to/file.txt` is
          specified the file will be saved at that location.
      origin: Original URL of the file.
      untar: Deprecated in favor of `extract` argument.
          boolean, whether the file should be decompressed
      md5_hash: Deprecated in favor of `file_hash` argument.
          md5 hash of the file for verification
      file_hash: The expected hash string of the file after download.
          The sha256 and md5 hash algorithms are both supported.
      cache_subdir: Subdirectory under the Keras cache dir where the file is
          saved. If an absolute path `/path/to/folder` is
          specified the file will be saved at that location.
      hash_algorithm: Select the hash algorithm to verify the file.
          options are `'md5'`, `'sha256'`, and `'auto'`.
          The default 'auto' detects the hash algorithm in use.
      extract: True tries extracting the file as an Archive, like tar or zip.
      archive_format: Archive format to try for extracting the file.
          Options are `'auto'`, `'tar'`, `'zip'`, and `None`.
          `'tar'` includes tar, tar.gz, and tar.bz files.
          The default `'auto'` corresponds to `['tar', 'zip']`.
          None or an empty list will return no matches found.
      cache_dir: Location to store cached files, when None it
          defaults to the default directory `~/.keras/`.

  Returns:
      Path to the downloaded file
  """
  if cache_dir is None:
    cache_dir = os.path.join(os.path.expanduser('~'), '.keras')
  if md5_hash is not None and file_hash is None:
    file_hash = md5_hash
    hash_algorithm = 'md5'
  datadir_base = os.path.expanduser(cache_dir)
  if not os.access(datadir_base, os.W_OK):
    datadir_base = os.path.join('/tmp', '.keras')
  datadir = os.path.join(datadir_base, cache_subdir)
  _makedirs_exist_ok(datadir)

  fname = path_to_string(fname)

  if untar:
    untar_fpath = os.path.join(datadir, fname)
    fpath = untar_fpath + '.tar.gz'
  else:
    fpath = os.path.join(datadir, fname)

  download = False
  if os.path.exists(fpath):
    # File found; verify integrity if a hash was provided.
    if file_hash is not None:
      if not validate_file(fpath, file_hash, algorithm=hash_algorithm):
        print('A local file was found, but it seems to be '
              'incomplete or outdated because the ' + hash_algorithm +
              ' file hash does not match the original value of ' + file_hash +
              ' so we will re-download the data.')
        download = True
  else:
    download = True

  if download:
    print('Downloading data from', origin)

    class ProgressTracker(object):
      # Maintain progbar for the lifetime of download.
      # This design was chosen for Python 2.7 compatibility.
      progbar = None

    def dl_progress(count, block_size, total_size):
      if ProgressTracker.progbar is None:
        if total_size == -1:
          total_size = None
        ProgressTracker.progbar = Progbar(total_size)
      else:
        ProgressTracker.progbar.update(count * block_size)

    error_msg = 'URL fetch failure on {}: {} -- {}'
    try:
      try:
        urlretrieve(origin, fpath, dl_progress)
      except urllib.error.HTTPError as e:
        raise Exception(error_msg.format(origin, e.code, e.msg))
      except urllib.error.URLError as e:
        raise Exception(error_msg.format(origin, e.errno, e.reason))
    except (Exception, KeyboardInterrupt):
      # Do not leave a partially downloaded file behind.
      if os.path.exists(fpath):
        os.remove(fpath)
      raise
    ProgressTracker.progbar = None

  if untar:
    if not os.path.exists(untar_fpath):
      _extract_archive(fpath, datadir, archive_format='tar')
    return untar_fpath

  if extract:
    _extract_archive(fpath, datadir, archive_format)

  return fpath


def _makedirs_exist_ok(datadir):
  os.makedirs(datadir, exist_ok=True)  # pylint: disable=unexpected-keyword-arg


def _resolve_hasher(algorithm, file_hash=None):
  """Returns hash algorithm as hashlib function."""
  if algorithm == 'sha256':
    return hashlib.sha256()

  if algorithm == 'auto' and file_hash and len(file_hash) == 64:
    return hashlib.sha256()

  # This is used only for legacy purposes.
  return hashlib.md5()


def _hash_file(fpath, algorithm='sha256', chunk_size=65535):
  """Calculates a file sha256 or md5 hash.
  Example:

  ```python
  _hash_file('/path/to/file.zip')
  'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
  ```

  Args:
      fpath: path to the file being validated
      algorithm: hash algorithm, one of `'auto'`, `'sha256'`, or `'md5'`.
          The default `'auto'` detects the hash algorithm in use.
      chunk_size: Bytes to read at a time, important for large files.

  Returns:
      The file hash
  rbc                  &    j                         S N)r   )r   
fpath_files   r   <lambda>z_hash_file.<locals>.<lambda>?  s    jooj9 r7   r7   N)r,   r&   r   r   iterr`   	hexdigest)r   rV   r   hasherr   r   s     `  @r   
_hash_filer   &  s    & 	3Y'FFE4 J93? mmE 
				 
			s   'A--Bc                 b    t        ||      }t        t        | ||            t        |      k(  ryy)a  Validates a file against a sha256 or md5 hash.

  Args:
      fpath: path to the file being validated
      file_hash:  The expected hash string of the file.
          The sha256 and md5 hash algorithms are both supported.
      algorithm: Hash algorithm, one of 'auto', 'sha256', or 'md5'.
          The default 'auto' detects the hash algorithm in use.
      chunk_size: Bytes to read at a time, important for large files.

  Returns:
      Whether the file is valid
  """
  hasher = _resolve_hasher(algorithm, file_hash)

  if str(_hash_file(fpath, hasher, chunk_size)) == str(file_hash):
    return True
  else:
    return False


class ThreadsafeIter(object):
  """Wrap an iterator with a lock and propagate exceptions to all threads."""

  def __init__(self, it):
    self.it = it
    self.lock = threading.Lock()

    # After the wrapped iterator throws an exception, every later call from
    # any thread re-raises that same exception, so a failure in one worker
    # is visible to all of them instead of looking like a normal exit.
    self._exception = None

  def __iter__(self):
    return self

  def next(self):
    return self.__next__()

  def __next__(self):
    with self.lock:
      if self._exception:
        raise self._exception  # pylint: disable=raising-bad-type

      try:
        return next(self.it)
      except Exception as e:
        self._exception = e
        raise


def threadsafe_generator(f):

  @functools.wraps(f)
  def g(*a, **kw):
    return ThreadsafeIter(f(*a, **kw))

  return g


class Sequence(object):
  """Base object for fitting to a sequence of data, such as a dataset.

  Every `Sequence` must implement the `__getitem__` and the `__len__` methods.
  If you want to modify your dataset between epochs you may implement
  `on_epoch_end`.
  The method `__getitem__` should return a complete batch.

  Notes:

  `Sequence` is a safer way to do multiprocessing. This structure guarantees
  that the network will only train once on each sample per epoch, which is
  not the case with generators.

  Examples:

  ```python
  from skimage.io import imread
  from skimage.transform import resize
  import numpy as np
  import math

  # Here, `x_set` is list of path to the images
  # and `y_set` are the associated classes.

  class CIFAR10Sequence(Sequence):

      def __init__(self, x_set, y_set, batch_size):
          self.x, self.y = x_set, y_set
          self.batch_size = batch_size

      def __len__(self):
          return math.ceil(len(self.x) / self.batch_size)

      def __getitem__(self, idx):
          batch_x = self.x[idx * self.batch_size:(idx + 1) *
          self.batch_size]
          batch_y = self.y[idx * self.batch_size:(idx + 1) *
          self.batch_size]

          return np.array([
              resize(imread(file_name), (200, 200))
                 for file_name in batch_x]), np.array(batch_y)
  ```
  """

  @abstractmethod
  def __getitem__(self, index):
    """Gets batch at position `index`.

    Args:
        index: position of the batch in the Sequence.

    Returns:
        A batch
    """
    raise NotImplementedError

  @abstractmethod
  def __len__(self):
    """Number of batch in the Sequence.

    Returns:
        The number of batches in the Sequence.
    """
    raise NotImplementedError

  def on_epoch_end(self):
    """Method called at the end of every epoch.
    """
    pass

  def __iter__(self):
    """Create a generator that iterate over the Sequence."""
    for item in (self[i] for i in range(len(self))):
      yield item


def iter_sequence_infinite(seq):
  """Iterates indefinitely over a Sequence.

  Args:
    seq: `Sequence` instance.

  Yields:
    Batches of data from the `Sequence`.
  """
  while True:
    for item in seq:
      yield item


# Global state shared with the worker processes/threads started by enqueuers.
_SHARED_SEQUENCES = {}
# We use a Value to provide unique id to different processes.
_SEQUENCE_COUNTER = None

# Because multiprocessing pools are inherently unsafe, starting from a clean
# state can be essential to avoiding deadlocks. In order to accomplish this,
# we need to be able to check on the status of Pools that we create.
_DATA_POOLS = weakref.WeakSet()
_WORKER_ID_QUEUE = None  # Only created if needed.
_WORKER_IDS = set()
_FORCE_THREADPOOL = False
_FORCE_THREADPOOL_LOCK = threading.RLock()


def dont_use_multiprocessing_pool(f):
  # Wraps `f` so that any pool created while it runs is a thread pool, even
  # if the caller asked for multiprocessing.

  @functools.wraps(f)
  def wrapped(*args, **kwargs):
    with _FORCE_THREADPOOL_LOCK:
      global _FORCE_THREADPOOL
      old_force_threadpool, _FORCE_THREADPOOL = _FORCE_THREADPOOL, True
      out = f(*args, **kwargs)
      _FORCE_THREADPOOL = old_force_threadpool
      return out

  return wrapped


def get_pool_class(use_multiprocessing):
  global _FORCE_THREADPOOL
  if not use_multiprocessing or _FORCE_THREADPOOL:
    return multiprocessing.dummy.Pool  # ThreadPool
  return multiprocessing.Pool


def get_worker_id_queue():
  """Lazily create the queue to track worker ids."""
  global _WORKER_ID_QUEUE
  if _WORKER_ID_QUEUE is None:
    _WORKER_ID_QUEUE = multiprocessing.Queue()
  return _WORKER_ID_QUEUE


def init_pool(seqs):
  global _SHARED_SEQUENCES
  _SHARED_SEQUENCES = seqs


def get_index(uid, i):
  """Get the value from the Sequence `uid` at index `i`.

  To allow multiple Sequences to be used at the same time, we use `uid` to
  get a specific one. A single Sequence would cause the validation to
  overwrite the training Sequence.

  Args:
      uid: int, Sequence identifier
      i: index

  Returns:
      The value at index `i`.
  """
  return _SHARED_SEQUENCES[uid][i]


class SequenceEnqueuer(object):
  """Base class to enqueue inputs.

  The task of an Enqueuer is to use parallelism to speed up preprocessing.
  This is done with processes or threads.

  Example:

  ```python
      enqueuer = SequenceEnqueuer(...)
      enqueuer.start()
      datas = enqueuer.get()
      for data in datas:
          # Use the inputs; training, evaluating, predicting.
          # ... stop sometime.
      enqueuer.stop()
  ```
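
  A more concrete sketch, assuming the `OrderedEnqueuer` subclass defined
  later in this module and some `Sequence` implementation `my_sequence`
  (both names here are placeholders):

  ```python
      enqueuer = OrderedEnqueuer(my_sequence, use_multiprocessing=False)
      enqueuer.start(workers=2, max_queue_size=10)
      batches = enqueuer.get()
      for inputs, targets in batches:
          ...  # train/evaluate on the batch, then break when done.
      enqueuer.stop()
  ```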

  The `enqueuer.get()` should be an infinite stream of data.
  """

  def __init__(self, sequence, use_multiprocessing=False):
    self.sequence = sequence
    self.use_multiprocessing = use_multiprocessing

    global _SEQUENCE_COUNTER
    if _SEQUENCE_COUNTER is None:
      try:
        _SEQUENCE_COUNTER = multiprocessing.Value('i', 0)
      except OSError:
        # In this case the OS does not allow us to use
        # multiprocessing. We resort to an int
        # for enqueuer indexing.
        _SEQUENCE_COUNTER = 0

    if isinstance(_SEQUENCE_COUNTER, int):
      self.uid = _SEQUENCE_COUNTER
      _SEQUENCE_COUNTER += 1
    else:
      # Doing Multiprocessing.Value += x is not process-safe.
      with _SEQUENCE_COUNTER.get_lock():
        self.uid = _SEQUENCE_COUNTER.value
        _SEQUENCE_COUNTER.value += 1

    self.workers = 0
    self.executor_fn = None
    self.queue = None
    self.run_thread = None
    self.stop_signal = None

  def is_running(self):
    return self.stop_signal is not None and not self.stop_signal.is_set()

  def start(self, workers=1, max_queue_size=10):
    """Starts the handler's workers.

    Args:
        workers: Number of workers.
        max_queue_size: queue size
            (when full, workers could block on `put()`)
    """
    if self.use_multiprocessing:
      self.executor_fn = self._get_executor_init(workers)
    else:
      # We do not need the init since it's threads.
      self.executor_fn = lambda _: get_pool_class(False)(workers)
    self.workers = workers
    self.queue = queue.Queue(max_queue_size)
    self.stop_signal = threading.Event()
    self.run_thread = threading.Thread(target=self._run)
    self.run_thread.daemon = True
    self.run_thread.start()

  def _send_sequence(self):
    """Sends current Iterable to all workers."""
    # For new processes that may spawn
    _SHARED_SEQUENCES[self.uid] = self.sequence

  def stop(self, timeout=None):
    """Stops running threads and wait for them to exit, if necessary.

    Should be called by the same thread which called `start()`.

    Args:
        timeout: maximum time to wait on `thread.join()`
    """
    self.stop_signal.set()
    with self.queue.mutex:
      self.queue.queue.clear()
      self.queue.unfinished_tasks = 0
      self.queue.not_full.notify()
    self.run_thread.join(timeout)
    _SHARED_SEQUENCES[self.uid] = None

  def __del__(self):
    if self.is_running():
      self.stop()

  @abstractmethod
  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    raise NotImplementedError

  @abstractmethod
  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
        workers: Number of workers.

    Returns:
        Function, a Function to initialize the pool
    """
    raise NotImplementedError

  @abstractmethod
  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.
    # Returns
        Generator yielding tuples `(inputs, targets)`
            or `(inputs, targets, sample_weights)`.
    """
    raise NotImplementedError


class OrderedEnqueuer(SequenceEnqueuer):
  """Builds an Enqueuer from a Sequence.

  Args:
      sequence: A `tf.keras.utils.data_utils.Sequence` object.
      use_multiprocessing: use multiprocessing if True, otherwise threading
      shuffle: whether to shuffle the data at the beginning of each epoch
  """

  def __init__(self, sequence, use_multiprocessing=False, shuffle=False):
    super(OrderedEnqueuer, self).__init__(sequence, use_multiprocessing)
    self.shuffle = shuffle

  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
        workers: Number of workers.

    Returns:
        Function, a Function to initialize the pool
    """

    def pool_fn(seqs):
      pool = get_pool_class(True)(
          workers, initializer=init_pool_generator,
          initargs=(seqs, None, get_worker_id_queue()))
      _DATA_POOLS.add(pool)
      return pool

    return pool_fn

  def _wait_queue(self):
    """Wait for the queue to be empty."""
    while True:
      time.sleep(0.1)
      if self.queue.unfinished_tasks == 0 or self.stop_signal.is_set():
        return

  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    sequence = list(range(len(self.sequence)))
    self._send_sequence()  # Share the initial sequence
    while True:
      if self.shuffle:
        random.shuffle(sequence)

      with closing(self.executor_fn(_SHARED_SEQUENCES)) as executor:
        for i in sequence:
          if self.stop_signal.is_set():
            return

          self.queue.put(
              executor.apply_async(get_index, (self.uid, i)), block=True)

        # Done with the current epoch, waiting for the final batches
        self._wait_queue()

        if self.stop_signal.is_set():
          # We're done
          return

      # Call the internal on epoch end.
      self.sequence.on_epoch_end()
      self._send_sequence()  # Update the pool

  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.

    Yields:
        The next element in the queue, i.e. a tuple
        `(inputs, targets)` or
        `(inputs, targets, sample_weights)`.
    """
    while self.is_running():
      try:
        inputs = self.queue.get(block=True, timeout=5).get()
        if self.is_running():
          self.queue.task_done()
        if inputs is not None:
          yield inputs
      except queue.Empty:
        pass
      except Exception as e:  # pylint: disable=broad-except
        self.stop()
        raise e


def init_pool_generator(gens, random_seed=None, id_queue=None):
  """Initializer function for pool workers.

  Args:
    gens: State which should be made available to worker processes.
    random_seed: An optional value with which to seed child processes.
    id_queue: A multiprocessing Queue of worker ids. This is used to indicate
      that a worker process was created by Keras and can be terminated using
      the cleanup_all_keras_forkpools utility.
  """
  global _SHARED_SEQUENCES
  _SHARED_SEQUENCES = gens

  worker_proc = multiprocessing.current_process()

  # name isn't used for anything, but setting a more descriptive name is
  # helpful when diagnosing orphaned processes.
  worker_proc.name = 'Keras_worker_{}'.format(worker_proc.name)

  if random_seed is not None:
    np.random.seed(random_seed + worker_proc.ident)

  if id_queue is not None:
    # If a worker id queue is passed in, obtain a unique id for the worker and
    # put the worker id in the queue, so that the process can be tracked.
    id_queue.put(worker_proc.ident, block=True, timeout=0.1)


def next_sample(uid):
  """Gets the next value from the generator `uid`.

  To allow multiple generators to be used at the same time, we use `uid` to
  get a specific one. A single generator would cause the validation to
  overwrite the training generator.

  Args:
      uid: int, generator identifier

  Returns:
      The next value of generator `uid`.
  """
  return next(_SHARED_SEQUENCES[uid])


class GeneratorEnqueuer(SequenceEnqueuer):
  """Builds a queue out of a data generator.

  The provided generator can be finite in which case the class will throw
  a `StopIteration` exception.

  Args:
      generator: a generator function which yields data
      use_multiprocessing: use multiprocessing if True, otherwise threading
      random_seed: Initial seed for workers,
          will be incremented by one for each worker.
  """

  def __init__(self, generator,
               use_multiprocessing=False,
               random_seed=None):
    super(GeneratorEnqueuer, self).__init__(generator, use_multiprocessing)
    self.random_seed = random_seed

  def _get_executor_init(self, workers):
    """Gets the Pool initializer for multiprocessing.

    Args:
      workers: Number of workers.

    Returns:
        A Function to initialize the pool
    """

    def pool_fn(seqs):
      pool = get_pool_class(True)(
          workers, initializer=init_pool_generator,
          initargs=(seqs, self.random_seed, get_worker_id_queue()))
      _DATA_POOLS.add(pool)
      return pool

    return pool_fn

  def _run(self):
    """Submits request to the executor and queue the `Future` objects."""
    self._send_sequence()  # Share the initial generator
    with closing(self.executor_fn(_SHARED_SEQUENCES)) as executor:
      while True:
        if self.stop_signal.is_set():
          return

        self.queue.put(
            executor.apply_async(next_sample, (self.uid,)), block=True)

  def get(self):
    """Creates a generator to extract data from the queue.

    Skip the data if it is `None`.

    Yields:
        The next element in the queue, i.e. a tuple
        `(inputs, targets)` or
        `(inputs, targets, sample_weights)`.
    """
    try:
      while self.is_running():
        inputs = self.queue.get(block=True).get()
        self.queue.task_done()
        if inputs is not None:
          yield inputs
    except StopIteration:
      # Special case for finite generators
      last_ones = []
      while self.queue.qsize() > 0:
        last_ones.append(self.queue.get(block=True))
      # Wait for them to complete
      for f in last_ones:
        f.wait()
      # Keep the good ones
      last_ones = [future.get() for future in last_ones if future.successful()]
      for inputs in last_ones:
        if inputs is not None:
          yield inputs
    except Exception as e:  # pylint: disable=broad-except
      self.stop()
      if 'generator already executing' in str(e):
        raise RuntimeError(
            'Your generator is NOT thread-safe. '
            'Keras requires a thread-safe generator when '
            '`use_multiprocessing=False, workers > 1`. ')
      raise e