
"""Utility functions for training."""
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor
from tensorflow.python.ops import cond
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variable_v1
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util.tf_export import tf_export

# A deliberately long key, to minimize the chance of collision with
# user-defined collection names.
GLOBAL_STEP_READ_KEY = 'global_step_read_op_cache'

# Legacy alias: some callers still import `write_graph` from this module.
write_graph = graph_io.write_graph


@tf_export(v1=['train.global_step'])
def global_step(sess, global_step_tensor):
  """Small helper to get the global step.

  ```python
  # Create a variable to hold the global_step.
  global_step_tensor = tf.Variable(10, trainable=False, name='global_step')
  # Create a session.
  sess = tf.compat.v1.Session()
  # Initialize the variable
  sess.run(global_step_tensor.initializer)
  # Get the variable value.
  print('global_step: %s' % tf.compat.v1.train.global_step(sess,
  global_step_tensor))

  global_step: 10
  ```

  Args:
    sess: A TensorFlow `Session` object.
    global_step_tensor:  `Tensor` or the `name` of the operation that contains
      the global step.

  Returns:
    The global step value.
  """
  if context.executing_eagerly():
    return int(global_step_tensor.numpy())
  return int(sess.run(global_step_tensor))


@tf_export(v1=['train.get_global_step'])
def get_global_step(graph=None):
  """Get the global step tensor.

  The global step tensor must be an integer variable. We first try to find it
  in the collection `GLOBAL_STEP`, or by name `global_step:0`.
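
  For example, any scalar integer variable added to the `GLOBAL_STEP`
  collection is picked up (an illustrative sketch, in the spirit of the
  doctests below, not part of the original docs):

  ```python
  with tf.Graph().as_default() as g:
    v = tf.compat.v1.get_variable(
        'my_step', shape=[], dtype=tf.int64,
        initializer=tf.zeros_initializer(),
        collections=[tf.compat.v1.GraphKeys.GLOBAL_STEP,
                     tf.compat.v1.GraphKeys.GLOBAL_VARIABLES])
    assert tf.compat.v1.train.get_global_step(g) is v
  ```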

  Args:
    graph: The graph to find the global step in. If missing, use default graph.

  Returns:
    The global step variable, or `None` if none was found.

  Raises:
    TypeError: If the global step tensor has a non-integer type, or if it is not
      a `Variable`.

  @compatibility(TF2)
  With the deprecation of global graphs, TF no longer tracks variables in
  collections. In other words, there are no global variables in TF2. Thus, the
  global step functions have been removed (`get_or_create_global_step`,
  `create_global_step`, `get_global_step`). You have two options for migrating:

  1. Create a Keras optimizer, which generates an `iterations` variable. This
     variable is automatically incremented when calling `apply_gradients`.
  2. Manually create and increment a `tf.Variable`.

  Below is an example of migrating away from using a global step to using a
  Keras optimizer:

  Define a dummy model and loss:

  >>> def compute_loss(x):
  ...   v = tf.Variable(3.0)
  ...   y = x * v
  ...   loss = x * 5 - x * v
  ...   return loss, [v]

  Before migrating:

  >>> g = tf.Graph()
  >>> with g.as_default():
  ...   x = tf.compat.v1.placeholder(tf.float32, [])
  ...   loss, var_list = compute_loss(x)
  ...   global_step = tf.compat.v1.train.get_or_create_global_step()
  ...   global_init = tf.compat.v1.global_variables_initializer()
  ...   optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.1)
  ...   train_op = optimizer.minimize(loss, global_step, var_list)
  >>> sess = tf.compat.v1.Session(graph=g)
  >>> sess.run(global_init)
  >>> print("before training:", sess.run(global_step))
  before training: 0
  >>> sess.run(train_op, feed_dict={x: 3})
  >>> print("after training:", sess.run(global_step))
  after training: 1

  Using `get_global_step`:

  >>> with g.as_default():
  ...   print(sess.run(tf.compat.v1.train.get_global_step()))
  1

  Migrating to a Keras optimizer:

  >>> optimizer = tf.keras.optimizers.SGD(.01)
  >>> print("before training:", optimizer.iterations.numpy())
  before training: 0
  >>> with tf.GradientTape() as tape:
  ...   loss, var_list = compute_loss(3)
  ...   grads = tape.gradient(loss, var_list)
  ...   optimizer.apply_gradients(zip(grads, var_list))
  >>> print("after training:", optimizer.iterations.numpy())
  after training: 1

  @end_compatibility
  """
  graph = graph or ops.get_default_graph()
  global_step_tensor = None
  global_step_tensors = graph.get_collection(ops.GraphKeys.GLOBAL_STEP)
  if len(global_step_tensors) == 1:
    global_step_tensor = global_step_tensors[0]
  elif not global_step_tensors:
    # Nothing in the collection, so fall back to the conventional name.
    try:
      global_step_tensor = graph.get_tensor_by_name('global_step:0')
    except KeyError:
      return None
  else:
    logging.error('Multiple tensors in global_step collection.')
    return None

  assert_global_step(global_step_tensor)
  return global_step_tensor


@tf_export(v1=['train.create_global_step'])
def create_global_step(graph=None):
  """Create global step tensor in graph.

  Args:
    graph: The graph in which to create the global step tensor. If missing, use
      default graph.

  Returns:
    Global step tensor.

  Raises:
    ValueError: if global step tensor is already defined.

  @compatibility(TF2)
  With the deprecation of global graphs, TF no longer tracks variables in
  collections. In other words, there are no global variables in TF2. Thus, the
  global step functions have been removed (`get_or_create_global_step`,
  `create_global_step`, `get_global_step`). You have two options for migrating:

  1. Create a Keras optimizer, which generates an `iterations` variable. This
     variable is automatically incremented when calling `apply_gradients`.
  2. Manually create and increment a `tf.Variable`.

  Below is an example of migrating away from using a global step to using a
  Keras optimizer:

  Define a dummy model and loss:

  >>> def compute_loss(x):
  ...   v = tf.Variable(3.0)
  ...   y = x * v
  ...   loss = x * 5 - x * v
  ...   return loss, [v]

  Before migrating:

  >>> g = tf.Graph()
  >>> with g.as_default():
  ...   x = tf.compat.v1.placeholder(tf.float32, [])
  ...   loss, var_list = compute_loss(x)
  ...   global_step = tf.compat.v1.train.create_global_step()
  ...   global_init = tf.compat.v1.global_variables_initializer()
  ...   optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.1)
  ...   train_op = optimizer.minimize(loss, global_step, var_list)
  >>> sess = tf.compat.v1.Session(graph=g)
  >>> sess.run(global_init)
  >>> print("before training:", sess.run(global_step))
  before training: 0
  >>> sess.run(train_op, feed_dict={x: 3})
  >>> print("after training:", sess.run(global_step))
  after training: 1

  Migrating to a Keras optimizer:

  >>> optimizer = tf.keras.optimizers.SGD(.01)
  >>> print("before training:", optimizer.iterations.numpy())
  before training: 0
  >>> with tf.GradientTape() as tape:
  ...   loss, var_list = compute_loss(3)
  ...   grads = tape.gradient(loss, var_list)
  ...   optimizer.apply_gradients(zip(grads, var_list))
  >>> print("after training:", optimizer.iterations.numpy())
  after training: 1

  @end_compatibility
  """
  graph = graph or ops.get_default_graph()
  if get_global_step(graph) is not None:
    raise ValueError('"global_step" already exists.')
  if context.executing_eagerly():
    with ops.device('cpu:0'):
      return variable_scope.get_variable(
          ops.GraphKeys.GLOBAL_STEP,
          shape=[],
          dtype=dtypes.int64,
          initializer=init_ops.zeros_initializer(),
          trainable=False,
          aggregation=variables.VariableAggregation.ONLY_FIRST_REPLICA,
          collections=[
              ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP
          ])
  # Create in the proper graph and in the base name scope, so the variable is
  # always named exactly 'global_step'.
  with graph.as_default() as g, g.name_scope(None):
    return variable_scope.get_variable(
        ops.GraphKeys.GLOBAL_STEP,
        shape=[],
        dtype=dtypes.int64,
        initializer=init_ops.zeros_initializer(),
        trainable=False,
        aggregation=variables.VariableAggregation.ONLY_FIRST_REPLICA,
        collections=[ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP])


@tf_export(v1=['train.get_or_create_global_step'])
def get_or_create_global_step(graph=None):
  """Returns and creates (if necessary) the global step tensor.

  Args:
    graph: The graph in which to create the global step tensor. If missing, use
      default graph.

  Returns:
    The global step tensor.

  @compatibility(TF2)
  With the deprecation of global graphs, TF no longer tracks variables in
  collections. In other words, there are no global variables in TF2. Thus, the
  global step functions have been removed (`get_or_create_global_step`,
  `create_global_step`, `get_global_step`). You have two options for migrating:

  1. Create a Keras optimizer, which generates an `iterations` variable. This
     variable is automatically incremented when calling `apply_gradients`.
  2. Manually create and increment a `tf.Variable`.

  Below is an example of migrating away from using a global step to using a
  Keras optimizer:

  Define a dummy model and loss:

  >>> def compute_loss(x):
  ...   v = tf.Variable(3.0)
  ...   y = x * v
  ...   loss = x * 5 - x * v
  ...   return loss, [v]

  Before migrating:

  >>> g = tf.Graph()
  >>> with g.as_default():
  ...   x = tf.compat.v1.placeholder(tf.float32, [])
  ...   loss, var_list = compute_loss(x)
  ...   global_step = tf.compat.v1.train.get_or_create_global_step()
  ...   global_init = tf.compat.v1.global_variables_initializer()
  ...   optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.1)
  ...   train_op = optimizer.minimize(loss, global_step, var_list)
  >>> sess = tf.compat.v1.Session(graph=g)
  >>> sess.run(global_init)
  >>> print("before training:", sess.run(global_step))
  before training: 0
  >>> sess.run(train_op, feed_dict={x: 3})
  >>> print("after training:", sess.run(global_step))
  after training: 1

  Migrating to a Keras optimizer:

  >>> optimizer = tf.keras.optimizers.SGD(.01)
  >>> print("before training:", optimizer.iterations.numpy())
  before training: 0
  >>> with tf.GradientTape() as tape:
  ...   loss, var_list = compute_loss(3)
  ...   grads = tape.gradient(loss, var_list)
  ...   optimizer.apply_gradients(zip(grads, var_list))
  >>> print("after training:", optimizer.iterations.numpy())
  after training: 1

  @end_compatibility
  """
  graph = graph or ops.get_default_graph()
  global_step_tensor = get_global_step(graph)
  if global_step_tensor is None:
    global_step_tensor = create_global_step(graph)
  return global_step_tensor


@tf_export(v1=['train.assert_global_step'])
def assert_global_step(global_step_tensor):
  """Asserts `global_step_tensor` is a scalar int `Variable` or `Tensor`.

  Args:
    global_step_tensor: `Tensor` to test.
  """
  if not (isinstance(global_step_tensor, variables.Variable) or
          isinstance(global_step_tensor, tensor.Tensor) or
          resource_variable_ops.is_resource_variable(global_step_tensor)):
    raise TypeError('Existing "global_step" must be a Variable or Tensor: %s.' %
                    global_step_tensor)

  if not global_step_tensor.dtype.base_dtype.is_integer:
    raise TypeError('Existing "global_step" does not have integer type: %s' %
                    global_step_tensor.dtype)

  if (global_step_tensor.get_shape().ndims != 0 and
      global_step_tensor.get_shape().is_fully_defined()):
    raise TypeError('Existing "global_step" is not scalar: %s' %
                    global_step_tensor.get_shape())


def _get_global_step_read(graph=None):
  """Gets global step read tensor in graph.

  Args:
    graph: The graph in which to create the global step read tensor. If missing,
      use default graph.

  Returns:
    Global step read tensor.

  Raises:
    RuntimeError: if multiple items found in collection GLOBAL_STEP_READ_KEY.
  """
  graph = graph or ops.get_default_graph()
  global_step_read_tensors = graph.get_collection(GLOBAL_STEP_READ_KEY)
  if len(global_step_read_tensors) > 1:
    raise RuntimeError('There are multiple items in collection {}. '
                       'There should be only one.'.format(GLOBAL_STEP_READ_KEY))

  if len(global_step_read_tensors) == 1:
    return global_step_read_tensors[0]
  return None


def _get_or_create_global_step_read(graph=None):
  """Gets or creates global step read tensor in graph.

  Args:
    graph: The graph in which to create the global step read tensor. If missing,
      use default graph.

  Returns:
    The global step read tensor if the graph has a global step tensor, else
      `None`.
  """
  graph = graph or ops.get_default_graph()
  global_step_read_tensor = _get_global_step_read(graph)
  if global_step_read_tensor is not None:
    return global_step_read_tensor
  global_step_tensor = get_global_step(graph)
  if global_step_tensor is None:
    return None
  # Add zero so that a copy of the variable is created as a Tensor.
  with graph.as_default() as g, g.name_scope(None):
    with g.name_scope(global_step_tensor.op.name + '/'):
      # Fall back to the initial value while the variable is uninitialized, so
      # that reading the step never depends on initialization order. Estimator,
      # for example, builds every model_fn under a dependency on this tensor.
      if isinstance(global_step_tensor, variables.Variable):
        global_step_value = cond.cond(
            variables.is_variable_initialized(global_step_tensor),
            global_step_tensor.read_value,
            lambda: global_step_tensor.initial_value)
      else:
        global_step_value = global_step_tensor

      global_step_read_tensor = global_step_value + 0
      ops.add_to_collection(GLOBAL_STEP_READ_KEY, global_step_read_tensor)
  return _get_global_step_read(graph)

*3((*%&u-
	GH H <EB CQT 2 C	
(++0036	7 C##%<$=> C##$6	BC CC CC C CC C CC C CC C C C C Cs`   D)DC/C	4	C/=	D	DC"C/&	D/C84D;	DD		DD"rV   )__doc__tensorflow.python.eagerr   tensorflow.python.frameworkr   r   r   r   tensorflow.python.opsr   r	   r
   r   r   r   r   tensorflow.python.platformr   r&    tensorflow.python.util.tf_exportr   rN   write_graphr   r+   r>   r@   r(   rR   r`   re    r   r   <module>rn      s    & + . 0 + . & * 7 + 0 - + < 6 3  "" "#$+ %+< &'(Z )Zz )*+ZQ ,ZQz 012C 3CL )*+4 ,4,0&DCr   
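
# A minimal end-to-end sketch of how the helpers above fit together
# (illustrative only, not part of the original module; it assumes TF1-style
# graph mode, `session` = tensorflow.python.client.session, and it calls the
# private helpers directly, which ordinary client code should not do):
#
#   g = ops.Graph()
#   with g.as_default():
#     step = get_or_create_global_step(g)
#     inc = _increment_global_step(1, graph=g)
#     init = variables.global_variables_initializer()
#   with session.Session(graph=g) as sess:
#     sess.run(init)
#     sess.run(inc)
#     print(global_step(sess, step))  # -> 1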