
"""Ops to use variables as resources."""

import contextlib
import functools
from typing import Any
import weakref

from absl import logging

from tensorflow.compiler.tf2xla.ops import gen_xla_ops
from tensorflow.core.config import flags
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import variable_pb2
from tensorflow.core.function import trace_type
from tensorflow.core.protobuf import struct_pb2
from tensorflow.python.checkpoint import tensor_callable
from tensorflow.python.client import pywrap_tf_session
from tensorflow.python.compat import compat as forward_compat
from tensorflow.python.eager import context
from tensorflow.python.eager import record
from tensorflow.python.eager import tape
from tensorflow.python.framework import auto_control_deps_utils as acd
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import composite_tensor_gradient
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import cpp_shape_inference_pb2
from tensorflow.python.framework import device as pydev
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor as tensor_module
from tensorflow.python.framework import tensor_conversion_registry
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.ops import handle_data_util
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_resource_variable_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.saved_model import nested_structure_coder
from tensorflow.python.trackable import base as trackable
from tensorflow.python.types import core
from tensorflow.python.util import compat
from tensorflow.python.util import numpy_compat
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export

acd.register_read_only_resource_op("ReadVariableOp")
acd.register_read_only_resource_op("VariableShape")
acd.register_read_only_resource_op("ResourceGather")
acd.register_read_only_resource_op("ResourceGatherNd")
acd.register_read_only_resource_op("_ReadVariablesOp")

get_resource_handle_data = ops.get_resource_handle_data


def get_eager_safe_handle_data(handle):
  """Get the data handle from the Tensor `handle`."""
  assert isinstance(handle, tensor_module.Tensor)

  if isinstance(handle, ops.EagerTensor):
    return handle._handle_data  # pylint: disable=protected-access
  else:
    return get_resource_handle_data(handle)


def _set_handle_shapes_and_types(tensor, handle_data, graph_mode):
  """Sets the shape inference result HandleData on tensor.

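  For instance, the `HandleData` recorded for a scalar `float32` variable looks
  roughly like this in proto text format (illustrative sketch only):

  ```
  is_set: true
  shape_and_type {
    shape {
    }
    dtype: DT_FLOAT
  }
  ```
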
  Args:
    tensor: A `Tensor` or `EagerTensor`.
    handle_data: A `CppShapeInferenceResult.HandleData`.
    graph_mode: A python bool.
  """
  tensor._handle_data = handle_data  # pylint: disable=protected-access
  if not graph_mode:
    return

  # In graph mode, also propagate the handle data to the C API graph so shape
  # inference can see it.
  shapes, types = zip(*[(pair.shape, pair.dtype)
                        for pair in handle_data.shape_and_type])
  ranks = [len(s.dim) if not s.unknown_rank else -1 for s in shapes]
  shapes = [[d.size for d in s.dim] if not s.unknown_rank else None
            for s in shapes]
  with tensor._op.graph._c_graph.get() as c_graph:  # pylint: disable=protected-access
    pywrap_tf_session.TF_GraphSetOutputHandleShapesAndTypes_wrapper(
        c_graph,
        tensor._as_tf_output(),  # pylint: disable=protected-access
        shapes,
        ranks,
        types)


def _combine_handle_data(handle, initial_value):
  """Concats HandleData from tensors `handle` and `initial_value`.

  Args:
    handle: A `Tensor` of dtype `resource`.
    initial_value: A `Tensor`.

  Returns:
    A `CppShapeInferenceResult.HandleData`.  If `initial_value` has dtype
    `variant`, the `HandleData` contains the concatenation of the shape_and_type
    from both `handle` and `initial_value`.

  Raises:
    RuntimeError: If handle, which was returned by VarHandleOp, either has
      no handle data, or its len(handle_data.shape_and_type) != 1.
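
  For example (illustrative sketch, assuming variant-dtype variables are
  supported in your build), a variable initialized from a `tf.data` dataset's
  variant tensor takes the `initial_value.dtype == variant` path below:

  ```python
  import tensorflow as tf

  ds = tf.data.Dataset.range(3)
  v = tf.Variable(ds._variant_tensor)  # dtype is tf.variant
  print(v.dtype)  # <dtype: 'variant'>
  ```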
  """
  assert handle.dtype == dtypes.resource

  variable_handle_data = get_eager_safe_handle_data(handle)

  if initial_value.dtype != dtypes.variant:
    return variable_handle_data

  extra_handle_data = get_eager_safe_handle_data(initial_value)
  if extra_handle_data is not None and extra_handle_data.is_set:
    if (variable_handle_data is None or not variable_handle_data.is_set or
        len(variable_handle_data.shape_and_type) != 1):
      raise RuntimeError(
          "Expected VarHandleOp to return a length==1 shape_and_type, "
          f"but saw: '{variable_handle_data}'")
    variable_handle_data.shape_and_type.extend(
        extra_handle_data.shape_and_type)
  return variable_handle_data


def _variable_handle_from_shape_and_dtype(
    shape, dtype, shared_name, name, graph_mode, initial_value=None):
  """Create a variable handle, copying in handle data from `initial_value`."""
  container = ops.get_default_graph()._container  # pylint: disable=protected-access
  if container is None:
    container = ""
  shape = tensor_shape.as_shape(shape)
  dtype = dtypes.as_dtype(dtype)
  if not graph_mode:
    if shared_name is not None:
      raise errors.InternalError(
          node_def=None,
          op=None,
          message="Using an explicit shared_name is not allowed when "
          "executing eagerly.")
    shared_name = context.anonymous_name()

  handle = gen_resource_variable_ops.var_handle_op(
      shape=shape,
      dtype=dtype,
      shared_name=shared_name,
      debug_name=name,
      name=name,
      container=container)
  if initial_value is None:
    initial_value = handle
  if graph_mode:
    full_handle_data = _combine_handle_data(handle, initial_value)
    _set_handle_shapes_and_types(handle, full_handle_data, graph_mode)
    return handle
  else:
    handle_data = handle_data_util.create_handle_data(shape, dtype)
    if initial_value is not None and initial_value.dtype == dtypes.variant:
      extra_handle_data = get_eager_safe_handle_data(initial_value)
      if extra_handle_data is not None and extra_handle_data.is_set:
        if (not handle_data.is_set or
            len(handle_data.shape_and_type) != 1):
          raise RuntimeError(
              "Expected VarHandleOp to return a length==1 shape_and_type, "
              f"but saw: '{handle_data}'")
        handle_data.shape_and_type.extend(extra_handle_data.shape_and_type)

    _set_handle_shapes_and_types(handle, handle_data, graph_mode)
    return handle


def eager_safe_variable_handle(initial_value, shape, shared_name, name,
                               graph_mode):
  """Creates a variable handle with information to do shape inference.

  The dtype is read from `initial_value` and stored in the returned
  resource tensor's handle data.

  If `initial_value.dtype == tf.variant`, we additionally extract the handle
  data (if any) from `initial_value` and append it to the `handle_data`.
  In this case, the returned tensor's handle data is in the form

  ```
  is_set: true
  shape_and_type {
    shape {
      // initial_value.shape
    }
    dtype: DT_VARIANT
  }
  shape_and_type {
    // handle_data(initial_value).shape_and_type[0]
  }
  shape_and_type {
    // handle_data(initial_value).shape_and_type[1]
  }
  ...
  ```

  Ops that read from this tensor, such as `ReadVariableOp` and
  `AssignVariableOp`, know that `handle_data(handle).shape_and_type[1:]`
  correspond to the handle data of the variant(s) stored in the Variable.
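
  For example, building a variable through the public `tf.Variable` API ends
  up creating such a resource handle via this helper (illustrative sketch, not
  an exact trace of the call path):

  ```python
  import tensorflow as tf

  v = tf.Variable([1.0, 2.0])
  print(v.handle.dtype)          # <dtype: 'resource'>
  print(v.read_value().numpy())  # [1. 2.]
  ```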

  Args:
    initial_value: A `Tensor`.
    shape: The shape of the handle data. Can be `TensorShape(None)` (i.e.
      unknown shape).
    shared_name: A string.
    name: A string.
    graph_mode: A python bool.

  Returns:
    The handle, a `Tensor` of type `resource`.
  """
  dtype = initial_value.dtype.base_dtype
  return _variable_handle_from_shape_and_dtype(shape, dtype, shared_name, name,
                                               graph_mode, initial_value)


@contextlib.contextmanager
def _handle_graph(handle):
  # Note: might have an eager tensor but not be executing eagerly when
  # building functions.
  if (context.executing_eagerly() or isinstance(handle, ops.EagerTensor) or
      ops.has_default_graph()):
    yield
  else:
    with handle.graph.as_default():
      yield


class EagerResourceDeleter:
  """An object which cleans up a resource handle.

  An alternative to defining a __del__ method on an object. The intended use is
  that ResourceVariables or other objects with resource handles will maintain a
  single reference to this object. When the parent object is collected, this
  object will be too. Even if the parent object is part of a reference cycle,
  the cycle will be collectable.
  """

  __slots__ = ["_handle", "_handle_device", "_context"]

  def __init__(self, handle, handle_device):
    if not isinstance(handle, tensor_module.Tensor):
      raise ValueError(
          f"Passed handle={handle} to EagerResourceDeleter. Was expecting "
          "the handle to be a `tf.Tensor`.")
    self._handle = handle
    self._handle_device = handle_device
    # Hold the context since __del__ runs an op; if the context were collected
    # before this object, destroying the resource would fail.
    self._context = context.context()

  def __del__(self):
    try:
      # A packed EagerTensor shares its underlying resources with other
      # handles, so it is not destroyed here.
      if (isinstance(self._handle, ops.EagerTensor) and
          self._handle.is_packed):
        return
      # The resource was created in eager mode, but this destructor may run in
      # graph mode (especially during tests), so switch back temporarily.
      with context.eager_mode():
        with ops.device(self._handle_device):
          gen_resource_variable_ops.destroy_resource_op(
              self._handle, ignore_lookup_error=True)
    except TypeError:
      # Suppress exceptions that can occur during module teardown, e.g. when
      # the context module has already been unloaded.
      pass
    except AttributeError:
      # The context module may have been unloaded ('NoneType' has no attribute
      # 'eager_mode'); other module unloads are caught here as well.
      pass


def shape_safe_assign_variable_handle(handle, shape, value, name=None):
  """Helper that checks shape compatibility and assigns variable."""
  with _handle_graph(handle):
    value_tensor = ops.convert_to_tensor(value)
  shape.assert_is_compatible_with(value_tensor.shape)
  return gen_resource_variable_ops.assign_variable_op(
      handle, value_tensor, name=name)


def _maybe_set_handle_data(dtype, handle, tensor):
  if dtype == dtypes.variant:
    # For DT_VARIANT types, the handle's shape_and_type[1:] stores the
    # variant's handle data.  Extract it.
    handle_data = get_eager_safe_handle_data(handle)
    if handle_data.is_set and len(handle_data.shape_and_type) > 1:
      tensor._handle_data = (  # pylint: disable=protected-access
          cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData(
              is_set=True, shape_and_type=handle_data.shape_and_type[1:]))


def variable_accessed(variable):
  """Records that `variable` was accessed for the tape and FuncGraph."""
  if hasattr(ops.get_default_graph(), "watch_variable"):
    ops.get_default_graph().watch_variable(variable)
  if variable.trainable:
    tape.variable_accessed(variable)


def default_variable_creator_v2(next_creator=None, **kwargs):
  """Default variable creator."""
  assert next_creator is None
  initial_value = kwargs.get("initial_value", None)
  trainable = kwargs.get("trainable", None)
  validate_shape = kwargs.get("validate_shape", True)
  caching_device = kwargs.get("caching_device", None)
  name = kwargs.get("name", None)
  variable_def = kwargs.get("variable_def", None)
  dtype = kwargs.get("dtype", None)
  import_scope = kwargs.get("import_scope", None)
  constraint = kwargs.get("constraint", None)
  distribute_strategy = kwargs.get("distribute_strategy", None)
  synchronization = kwargs.get("synchronization", None)
  aggregation = kwargs.get("aggregation", None)
  shape = kwargs.get("shape", None)
  experimental_enable_variable_lifting = kwargs.get(
      "experimental_enable_variable_lifting", None)

  return ResourceVariable(
      initial_value=initial_value,
      trainable=trainable,
      validate_shape=validate_shape,
      caching_device=caching_device,
      name=name,
      dtype=dtype,
      constraint=constraint,
      variable_def=variable_def,
      import_scope=import_scope,
      distribute_strategy=distribute_strategy,
      synchronization=synchronization,
      aggregation=aggregation,
      shape=shape,
      experimental_enable_variable_lifting=experimental_enable_variable_lifting,
  )


class BaseResourceVariable(variables.Variable, core.Tensor):
  """A python variable from an existing handle."""

  def __init__(
      self,
      trainable=None,
      shape=None,
      dtype=None,
      handle=None,
      constraint=None,
      synchronization=None,
      aggregation=None,
      distribute_strategy=None,
      name=None,
      unique_id=None,
      handle_name=None,
      graph_element=None,
      initial_value=None,
      initializer_op=None,
      is_initialized_op=None,
      cached_value=None,
      save_slice_info=None,
      caching_device=None,
      in_graph_mode=None,
      validate_shape=True,
      **unused_kwargs):
    """Creates a variable from a handle.
    Args:
      trainable: If `True`, GradientTapes automatically watch uses of this
        Variable.
      shape: The variable's shape. This shape can be set to tf.TensorShape(None)
        in order to assign values of different shapes to this variable.
        Otherwise (i.e. if the shape is fully determined), it will trigger run
        time checks to ensure that each assignment is of the same shape.
      dtype: The variable's dtype.
      handle: The variable's handle
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed a variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.
      distribute_strategy: The distribution strategy this variable was created
        under.
      name: The name for this variable.
      unique_id: Internal. Unique ID for this variable's handle.
      handle_name: The name for the variable's handle.
      graph_element: Optional, required only in session.run-mode. Pre-created
        tensor which reads this variable's value.
      initial_value: Optional. Variable's initial value.
      initializer_op: Operation which assigns the variable's initial value.
      is_initialized_op: Pre-created operation to check whether this variable is
        initialized.
      cached_value: Pre-created operation to read this variable in a specific
        device.
      save_slice_info: Metadata for variable partitioning.
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading.  Defaults to the Variable's
        device.  If not `None`, caches on another device.  Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      in_graph_mode: whether we are executing in TF1 graph mode. If None, will
        detect within the function. This is to avoid repeated init_scope()
        context entrances which can add up.
      validate_shape: If `False`, allows the variable to be initialized with a
        value of unknown shape. If `True`, the default, the shape of
        `initial_value` must be known.
    Nz
Variable:0:0F)"r   
init_scoper   r|   _in_graph_moder$   .validate_synchronization_aggregation_trainable
_trainable_synchronization_aggregation_save_slice_info_initial_value_initializer_op_is_initialized_op_graph_element_caching_device_cached_value_distribute_strategyrm   
_graph_keyr   ro   _shaper   rp   _dtyper   
_unique_id_handle_name_constraint_cached_shape_as_list_validate_shape_xla_sharding_variable_read)r   r   rA   rB   r9   r   r   r   r   rk   	unique_idhandle_namegraph_elementr`   initializer_opis_initialized_opcached_valuesave_slice_infor   in_graph_moder   unused_kwargss                         r:   r   zBaseResourceVariable.__init__}  sT   V >> >")";";"==> > *d@@[)T	; ,O[)  DO+D#D+D'D)D/D'D)D%D 3D ++-88DO''.DK//%(DKDLDO&d%,d!D!%D)DDDC> >s   EEc                     | j                   S r{   )r   r   s    r:   _get_xla_shardingz&BaseResourceVariable._get_xla_sharding  s    r<   c                     | j                   r4t        j                         s t        j                  d| j
                         || _        y)a  Annotates this `ResourceVariable` with `xla_sharding`.

    `xla_sharding` will be used to create an `XlaShardingOp` whenever a
    `ReadVariableOp` is created.

    Args:
      xla_sharding: The xla.OpSharding proto to annotate this ResourceVariable
        with.
    a  This variable (%s) has already been read (ie. a ReadVariableOp has already been generated) and a new XlaShardingOp using this sharding will not be created unless it is read again. If that's not possible, please set the XLA sharding before reading the variable.N)r   r   r|   r   warningrk   r   )r   xla_shardings     r:   _set_xla_shardingz&BaseResourceVariable._set_xla_sharding  s=     7#<#<#>ooG )) &Dr<   c           	         t        j                         r| j                  s	 t        j                  | j                        5  t        j
                  | j                         d      }d d d        d| j                  d| j                         d| j                  j                  dd	S d| j                  d| j                         d| j                  j                  dS # 1 sw Y   |xY w#  d}Y xY w)	NT)is_reprznumpy=<unavailable>z<tf.Variable 'z' shape=z dtype=z, >)
r   r|   r   r   r   
value_text
read_valuerk   	get_shaperB   )r   r   s     r:   __repr__zBaseResourceVariable.__repr__  s      "4+>+>+ZZ$ 	G~~doo&7F*	G ))T^^%tzz
D D ))T^^%tzz8 8	G 	G+*
s#   C. &C"'C. "C+'C. .C4c                     |j                  | j                  j                        }|j                  ||        t	        | j
                  | j                  | j                  |      S )NrA   rB   r   alias_id)alias_global_idr   _idadd_placeholderVariableSpecrA   rB   r   )r   signature_contextr   s      r:   __tf_tracing_type__z(BaseResourceVariable.__tf_tracing_type__  sR     001A1ABH %%h5djj"jj"&..!)+ +r<   c              #      K   | j                   .t        j                  | j                   g      5  d ddd       yd y# 1 sw Y   yxY ww)zMakes assignments depend on the cached value, if any.

    This prevents undefined behavior with reads not ordered wrt writes.

    Yields:
      None.
    N)r   r   control_dependenciesr   s    r:   _assign_dependenciesz)BaseResourceVariable._assign_dependencies   sL      %##T%7%7$89    s   -AAAAAc                 L    t        j                  | j                         |      S )zAllows direct conversion to a numpy array.

    >>> np.array(tf.Variable([1.0]))
    array([1.], dtype=float32)

    Returns:
      The variable value as a numpy array.
    rB   )r)   
np_asarraynumpy)r   rB   s     r:   	__array__zBaseResourceVariable.__array__/  s     ""4::<u==r<   c                 "    | j                         S r{   )__bool__r   s    r:   __nonzero__z BaseResourceVariable.__nonzero__@  s    ==?r<   c                 4    t        | j                               S r{   )boolr   r   s    r:   r   zBaseResourceVariable.__bool__C  s    !""r<   c                     | S r{   r   r   s    r:   __copy__zBaseResourceVariable.__copy__F  s    Kr<   c           
      0   t        j                         st        d      t        | j	                         | j
                  | j                  | j                  | j                  | j                  | j                  | j                        }||| j                  <   |S )NzA__deepcopy__() is only available when eager execution is enabled.)r`   r   r   rB   rk   r   r   r   )r   r|   NotImplementedErrorr   r   r   r   r   _shared_namer   r   r   r   )r   memocopied_variables      r:   __deepcopy__z!BaseResourceVariable.__deepcopy__I  s    $$&
MO O&oo'//##kk 55,,$$&O ,Dr<   c                     | j                   S )zThe dtype of this variable.)r   r   s    r:   rB   zBaseResourceVariable.dtypeY       ;;r<   c                 .    | j                   j                  S )zThe device this variable is on.)r9   r   r   s    r:   r   zBaseResourceVariable.device^  s     ;;r<   c                 .    | j                   j                  S )zThe `Graph` of this variable.)r9   rH   r   s    r:   rH   zBaseResourceVariable.graphc  s     ;;r<   c                     | j                   S )z)The name of the handle for this variable.)r   r   s    r:   rk   zBaseResourceVariable.nameh  s     r<   c                     | j                   S )zThe shape of this variable.)r   r   s    r:   rA   zBaseResourceVariable.shapem  r  r<   c                 D    | j                   j                  |      | _         y r{   )r   
merge_withr   rA   s     r:   	set_shapezBaseResourceVariable.set_shaper  s    ++((/DKr<   c                     | j                   j                  y | j                   j                  D cg c]  }|j                   c}S c c}w r{   )rA   ndimsdimsr   )r   rE   s     r:   _shape_as_listz#BaseResourceVariable._shape_as_listu  s5    zz!%1#CII111s   Ac                 >    | j                         }|y t        |      S r{   )r  tupler  s     r:   _shape_tuplez!BaseResourceVariable._shape_tuplez  s"    !E}<r<   c                 H    | j                   st        d      | j                  S )2The op responsible for initializing this variable.@This operation is not supported when eager execution is enabled.)r   r^   r   r   s    r:   createzBaseResourceVariable.create  s+      < = =r<   c                     | j                   S )z2The handle by which this variable can be accessed.)r   r   s    r:   r9   zBaseResourceVariable.handle  s     <<r<   c                     | j                   | j                   S t        j                  dd      5  | j                         cddd       S # 1 sw Y   yxY w)z:A cached operation which reads the value of this variable.NTignore_existing)r   r   colocate_with_read_variable_opr   s    r:   r   zBaseResourceVariable.value  sK    %			4	6 &##%& & &s   A

Ac                     | j                   S )z1Conversion function for Graph.as_graph_element().)r   r   s    r:   _as_graph_elementz&BaseResourceVariable._as_graph_element  s    r<   c                     | j                   S )r  )r   r   s    r:   initializerz BaseResourceVariable.initializer  s     r<   c                 X    t        j                         rt        d      | j                  S )z>Returns the Tensor used as the initial value for the variable.z?This property is not supported when eager execution is enabled.)r   r|   r^   r   r   s    r:   r`   z"BaseResourceVariable.initial_value  s.       " < = =r<   c                     | j                   S )zReturns the constraint function associated with this variable.

    Returns:
      The constraint function that was passed to the variable constructor.
      Can be `None` if no constraint was passed.
    )r   r   s    r:   r   zBaseResourceVariable.constraint  s     r<   returnc                 .    | j                   j                  S zThe op for this variable.)r9   rg   r   s    r:   rg   zBaseResourceVariable.op  s     ;;>>r<   c                     | j                   S r{   )r   r   s    r:   r   zBaseResourceVariable.trainable  s    ??r<   c                     | j                   S r{   )r   r   s    r:   r   z$BaseResourceVariable.synchronization  s       r<   c                     | j                   S r{   )r   r   s    r:   r   z BaseResourceVariable.aggregation  s    r<   c                 x    t        j                         rt        d      | j                  j	                  |      S )z1Evaluates and returns the value of this variable.r  )session)r   r|   r^   r   eval)r   r2  s     r:   r3  zBaseResourceVariable.eval  s:      " < = =##G#44r<   c                 |    t        j                         r| j                         j                         S t	        d      )Nz:numpy() is only available when eager execution is enabled.)r   r|   r   r   r  r   s    r:   r   zBaseResourceVariable.numpy  s6      "__$$&&
DF Fr<   zPrefer Dataset.range instead.c                 Z    t        j                  | j                  || j                        S )a  Increments this variable until it reaches `limit`.

    When that Op is run it tries to increment the variable by `1`. If
    incrementing the variable would bring it above `limit` then the Op raises
    the exception `OutOfRangeError`.

    If no error is raised, the Op outputs the value of the variable before
    the increment.

    This is essentially a shortcut for `count_up_to(self, limit)`.

    Args:
      limit: value at which incrementing the variable raises an error.

    Returns:
      A `Tensor` that will hold the variable value before the increment. If no
      other Op modifies this variable, the values produced will all be
      distinct.
    )limitT)r!   resource_count_up_tor9   rB   )r   r6  s     r:   count_up_toz BaseResourceVariable.count_up_to  s&    * --5DJJ0 0r<   c                    | |vrt         j                  j                  | j                        j	                  dd      j                         }t        j                  |      5  t        | j                  | j                  | j                  | j                        }ddd       || <   ||    }t        j                  |j                        5  |j                  | j                                ddd       y# 1 sw Y   [xY w# 1 sw Y   yxY w)For implementing `Trackable`.CPUr   )device_typedevice_index)r   rA   rB   rk   N)pydev
DeviceSpecfrom_stringr   replace	to_stringr   UninitializedVariabler   rA   rB   r  assignr   )r   
object_map	op_devicenew_vardestination_vars        r:   _copy_trackable_to_cpuz+BaseResourceVariable._copy_trackable_to_cpu  s    :""..t{{;CC! D --6Y[ ::i  $ (nn****""	$$ !j !&O	O**	+ 0 T__./0 0$ $0 0s   "8C4 D 4C= D	c                 &   d}|j                   j                         r4t        j                  | j                        5  t	        |       }ddd       nt	        |       }||| <   |j
                  || j
                  <   | j
                  gS # 1 sw Y   4xY w)r;  N)experimental_variable_policy_save_variable_devicesr   r   copy_to_graph_uninitializedr9   )r   rF  
tensor_mapoptionsr   new_variables         r:   _export_to_saved_model_graphz1BaseResourceVariable._export_to_saved_model_graph  s     L++BBD::dkk" 92489 9 16l#Jt*11Jt{{KK=9 9s   BBc                       fd}t         j                  t        j                  | j                   j
                        iS )z+Implements Trackable._serialize_to_tensors.c                     } t        j                  | j                        5  t        j                         r| j	                         s
	 d d d        y | j                         }t        j                  d      5  t        j                  |      cd d d        cd d d        S # 1 sw Y   nxY w	 d d d        y # 1 sw Y   y xY w)Nz/device:CPU:0)r   r   r   r|   is_initializedread_value_no_copyr   identity)vxr   s     r:   _read_variable_closurezJBaseResourceVariable._serialize_to_tensors.<locals>._read_variable_closure  s    
a::ahh 
'$$&q/?/?/A 	
' 
'   " ZZ( 	'##A&	' 	'
' 
'	' 	' 	'
' 
' 
's)   &B5%B57B	B5B(	$B55B>)rB   r   )	trackableVARIABLE_VALUE_KEYr   CallablerB   r   )r   rZ  s   ` r:   _serialize_to_tensorsz*BaseResourceVariable._serialize_to_tensors	  s;    ' 	$$$$&djjN r<   c                    t        j                  | j                        5  t        j                  |t        j
                           }	 t        | j                  | j                  |      }|cddd       S # t        $ r9}t        d|j                   d| j                   d| j                   d      |d}~ww xY w# 1 sw Y   yxY w)z+Implements Trackable._restore_from_tensors.z(Received incompatible tensor with shape z0 when attempting to restore variable with shape z
 and name .N)r   r   r   rW  r[  r\  r   r9   rA   r   rk   )r   restored_tensorsrestored_tensorassigned_variablees        r:   _restore_from_tensorsz*BaseResourceVariable._restore_from_tensors   s    	DKK	  !**
977
8:o-=KK_6    -67L7L6M N>>Bjj\ J		{!%& ,-	-- s/   'B9!A4)B94	B6=4B11B66B99Cc                 ,    t                d _         fd}t         dd      Qt        j                  dd      5  t        j
                   j                        5   ||      }ddd       ddd       n ||      }t        j                         s(t        j                  dg j                  gd d 	       t        j                         rt        j                         s j                   j                  j                         }t        j                        5  t        j                   ||
      }ddd       |j"                  j%                  dt'        j(                  |             S # 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   UxY w)a$  Reads the value of the variable.

    If the variable is in copy-on-read mode and `no_copy` is True, the variable
    is converted to copy-on-write mode before it is read.

    Args:
      no_copy: Whether to prevent a copy of the variable.

    Returns:
      The value of the variable.
    Tc                    | r6t        j                  ddd      rt        j                  j                         t        j
                  j                  j                        }t        j                  j                  |       |S )N        )forward_compatforward_compatibler    disable_copy_on_readr9   read_variable_opr   r   )no_copyresultr   s     r:   read_and_set_handlezCBaseResourceVariable._read_variable_op.<locals>.read_and_set_handle>  s_    	^66tQB!66t{{C(99
++t{{$fT[[$++v>mr<   r   Nr!  r,   c                     | gS r{   r   rY  s    r:   <lambda>z8BaseResourceVariable._read_variable_op.<locals>.<lambda>R  s    qc r<   c                     | gS r{   r   rs  s    r:   rt  z8BaseResourceVariable._read_variable_op.<locals>.<lambda>S  s    aS r<   )backward_functionforward_function)sharding_XlaShardingrR   )r   r   getattrr   r#  r   r   r   r|   r   record_operationr9   +xla_sharding_for_resource_variables_enabledr   SerializeToStringr   r   rg   	_set_attrr   	AttrValue)r   ro  rq  rp  sharding_strings   `    r:   r$  z&BaseResourceVariable._read_variable_op/  sc    dD t&-9T48 0ZZ,,- 	0&w/&	00 0 #7+f$$& 
VHt{{m)(* 	;;=))+***<<>oV$ L))&?KL ii


"
"_
5 M;	0 	00 0.L Ls/    E=	E0&E=F
0E:	5E==F
Fc                     t        j                  d      5  | j                         }ddd       t        j                        S # 1 sw Y   xY w)zConstructs an op which reads the value of this variable.

    Should be used when there are multiple reads, or when it is desirable to
    read the value only after some condition is true.

    Returns:
      The value of the variable.
    ReadNr   
name_scoper$  r   rW  r   r   s     r:   r   zBaseResourceVariable.read_valueg  sF     
	 '$$&e' e$$	' 's   AAc                     t        j                  d      5  | j                  d      }ddd       t        j                        S # 1 sw Y   xY w)a"  Constructs an op which reads the value of this variable without copy.

    The variable is read without making a copy even when it has been sparsely
    accessed. Variables in copy-on-read mode will be converted to copy-on-write
    mode.

    Returns:
      The value of the variable.
    r  T)ro  Nr  r  s     r:   rV  z'BaseResourceVariable.read_value_no_copyv  sK     
	 3$$T$2e3 e$$	3 3s   AAc                 $   t        j                  |dn|      5 }t        |        t        j                  | j
                  || j                  |      }| j                  t        j                  k(  rt        | j
                        }|j                  rKt        |j                        dkD  r3t        j                  j                  d|j                  dd       |_        t#        j$                  |      cddd       S 	 ddd       |S # 1 sw Y   S xY w)z:Reads the value of this variable sparsely, using `gather`.NGatherrB   rk   rX   Tr   )r   r  r   r    resource_gatherr9   r   r   r\   r;   r]   rD   r@   r   r   r   r6   r   rW  )r   indicesrk   r   rM   s        r:   sparse_readz BaseResourceVariable.sparse_read  s    	DLd	; )t'77
++wdkk>e 
	& 1=#k&@&@"AA"E%==HHk.H.H.L I N 
 !!%() )
 
') L) Ls   CDDc                    t        j                  |dn|      5 }| j                  rt        |        t	        j
                  | j                  || j                  |      }ddd       t        j                        S # 1 sw Y   xY w)z=Reads the value of this variable sparsely, using `gather_nd`.NGatherNdr  )
r   r  r   r   r    resource_gather_ndr9   r   r   rW  )r   r  rk   r   s       r:   	gather_ndzBaseResourceVariable.gather_nd  sn    	dl
	= >	$'::
++wdkk>e> e$$> >s   AA<<Bc                    t        j                         rt        d      |&| j                  j                  j                  |      rt        j                         }t        j                  | j                  j                  |      |_
        | j                  /t        j                  | j                  j                  |      |_        t        j                  | j                  j                  |      |_        | j                  0t        j                  | j                  j                  |      |_        n/t        j                  | j"                  j                  |      |_        d|_        | j&                  |_        | j(                  j*                  |_        | j,                  j*                  |_        | j.                  r5|j0                  j3                  | j.                  j5                  |             |S y)a@  Converts a `ResourceVariable` to a `VariableDef` protocol buffer.

    Args:
      export_scope: Optional `string`. Name scope to remove.

    Raises:
      RuntimeError: If run in EAGER mode.

    Returns:
      A `VariableDef` protocol buffer, or `None` if the `Variable` is not
      in the specified name scope.
    r  NT)export_scope)r   r|   r^   r9   rk   
startswithr   VariableDefr   strip_name_scopevariable_namer   initial_value_namer(  initializer_namer   snapshot_namer   is_resourcer   r   r   r   r   save_slice_info_def	MergeFromto_proto)r   r  var_defs      r:   r  zBaseResourceVariable.to_proto  s      " < = =t{{//::<H((*g!224;;3C3C3?Ag				( &)%9%9$$l&4"!$!5!5d6F6F6K6K6B"Dg				' # 4 4T5G5G5L5L5A!C !$ 4 4T5H5H5M5M5A!C g..g $ 4 4 : :g ,,22g			##--!!***E	Gnr<   c                 Z    t        j                         rt        d      t        | |      S )Nr  r   r   )r   r|   r^   r   r  s     r:   
from_protozBaseResourceVariable.from_proto  s2      " < = =!> >r<   d   c                 B    t        j                  | j                  |      S )zChecks whether a resource variable has been initialized.

    Outputs boolean scalar indicating whether the tensor has been initialized.

    Args:
      name: A name for the operation (optional).

    Returns:
      A `Tensor` of type `bool`.
    )r    var_is_initialized_opr9   )r   rk   s     r:   rU  z#BaseResourceVariable.is_initialized  s     %::4;;MMr<   c           	      L   t        | j                        5  | j                         5  t        j                  | j                  t        j                  || j                        |      }ddd       ddd       |r| j                        S S # 1 sw Y   &xY w# 1 sw Y   *xY w)al  Subtracts a value from this variable.

    Args:
      delta: A `Tensor`. The value to subtract from this variable.
      use_locking: If `True`, use locking during the operation.
      name: The name to use for the operation.
      read_value: A `bool`. Whether to read and return the new value of the
        variable or not.

    Returns:
      If `read_value` is `True`, this method will return the new value of the
      variable after the assignment has completed. Otherwise, when in graph mode
      it will return the `Operation` that does the assignment, and when in eager
      mode it will return `None`.
    r   r   N)	r   r9   r   r    assign_sub_variable_opr   r   rB   
_lazy_read)r   deltause_lockingrk   r   assign_sub_ops         r:   
assign_subzBaseResourceVariable.assign_sub  s    & 
t{{	# T%>%>%@ /FF
++


TZZ
8m 
 __]++   #   BAB)BB	BB#c           	      L   t        | j                        5  | j                         5  t        j                  | j                  t        j                  || j                        |      }ddd       ddd       |r| j                        S S # 1 sw Y   &xY w# 1 sw Y   *xY w)a^  Adds a value to this variable.

    Args:
      delta: A `Tensor`. The value to add to this variable.
      use_locking: If `True`, use locking during the operation.
      name: The name to use for the operation.
      read_value: A `bool`. Whether to read and return the new value of the
        variable or not.

    Returns:
      If `read_value` is `True`, this method will return the new value of the
      variable after the assignment has completed. Otherwise, when in graph mode
      it will return the `Operation` that does the assignment, and when in eager
      mode it will return `None`.
    r   r   N)	r   r9   r   r    assign_add_variable_opr   r   rB   r  )r   r  r  rk   r   assign_add_ops         r:   
assign_addzBaseResourceVariable.assign_add  s      
t{{	# T%>%>%@ /FF
++


TZZ
8m 
 __]++   r  c                     t        |        t        | j                  | j                  | j                  | j
                  || j                        S )N)r9   rB   rA   r   	parent_opr   )r   _UnreadVariabler9   rB   r   r   r   )r   rg   s     r:   r  zBaseResourceVariable._lazy_read  s?    d{{jjkk))//# #r<   c           
         t        | j                        5  t        j                  || j                        }| j
                  j                  |j                        sP| j                  d}ndt        | j                        z   }t        d| d| j
                   d|j                   d      i }t        j                  d	d
d      r-| j                  xr | j
                  j                         }||d<   t        j                   | j                  |fd|i|}	|r| j#                  |	      cddd       S 	 ddd       |	S # 1 sw Y   	S xY w)ae  Assigns a new value to this variable.

    Args:
      value: A `Tensor`. The new value for this variable.
      use_locking: If `True`, use locking during the assignment.
      name: The name to use for the assignment.
      read_value: A `bool`. Whether to read and return the new value of the
        variable or not.

    Returns:
      If `read_value` is `True`, this method will return the new value of the
      variable after the assignment has completed. Otherwise, when in graph mode
      it will return the `Operation` that does the assignment, and when in eager
      mode it will return `None`.
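
    For example (illustrative sketch going through the public `tf.Variable`
    API, which forwards to these assignment methods):

    ```python
    v = tf.Variable(1.0)
    v.assign(2.0)
    v.assign_add(0.5)
    v.assign_sub(1.0)
    print(v.numpy())  # 1.5
    ```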
    r   Nre    z!Cannot assign value to variable 'z%': Shape mismatch.The variable shape z, and the assigned value shape z are incompatible.rh  rj     r   rk   )r   r9   r   r   rB   r   is_compatible_withrA   rk   strr   rk  rl  r   is_fully_definedr    r   r  )
r   r   r  rk   r   r   tensor_namer   r   	assign_ops
             r:   rE  zBaseResourceVariable.assign'  sG   $ 
t{{	# ***5

Cl[[++L,>,>?99+c$))n,+0 >##';;- 0%%1%7%7$88JLN 	N f		*	*4B	7 --P$++2N2N2P#1 +>>
++|:*.:28:i	y))* *& 
'** +** s   DD==Ec           	          t        j                  t        | j                         | j                  | j
                  | j                  | j                  | j                        dfS )N)r`   r   rk   rB   r   r   r   )		functoolspartialr   r   r   r  rB   r   r   r   s    r:   
__reduce__zBaseResourceVariable.__reduce__P  sR    jjl..jj?? 557 9;; ;r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )ah  Subtracts `tf.IndexedSlices` from this variable.

    Args:
      sparse_delta: `tf.IndexedSlices` to be subtracted from this variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    DArgument `sparse_delta` must be a `tf.IndexedSlices`. Received arg: r   )r2   r   IndexedSlicesr   r  r    resource_scatter_subr9   r  r   r   valuesrB   r   sparse_deltar  rk   s       r:   scatter_subz BaseResourceVariable.scatter_sub[       lN$@$@A ;;G.J K K??!66KK  !!,"5"5tzzB		 r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )aZ  Adds `tf.IndexedSlices` to this variable.

    Args:
      sparse_delta: `tf.IndexedSlices` to be added to this variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_addr9   r  r   r   r  rB   r  s       r:   scatter_addz BaseResourceVariable.scatter_adds  r  r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )a  Updates this variable with the max of `tf.IndexedSlices` and itself.

    Args:
      sparse_delta: `tf.IndexedSlices` to use as an argument of max with this
        variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_maxr9   r  r   r   r  rB   r  s       r:   scatter_maxz BaseResourceVariable.scatter_max       lN$@$@A ;;G.J K K??!66KK  !!,"5"5tzzB		 r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )a  Updates this variable with the min of `tf.IndexedSlices` and itself.

    Args:
      sparse_delta: `tf.IndexedSlices` to use as an argument of min with this
        variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_minr9   r  r   r   r  rB   r  s       r:   scatter_minz BaseResourceVariable.scatter_min  r  r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )a^  Multiply this variable by `tf.IndexedSlices`.

    Args:
      sparse_delta: `tf.IndexedSlices` to multiply this variable by.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_mulr9   r  r   r   r  rB   r  s       r:   scatter_mulz BaseResourceVariable.scatter_mul  r  r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )aZ  Divide this variable by `tf.IndexedSlices`.

    Args:
      sparse_delta: `tf.IndexedSlices` to divide this variable by.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_divr9   r  r   r   r  rB   r  s       r:   scatter_divz BaseResourceVariable.scatter_div  r  r<   c           
         t        |t        j                        st        d|       | j	                  t        j                  | j                  |j                  t        j                  |j                  | j                        |            S )a`  Assigns `tf.IndexedSlices` to this variable.

    Args:
      sparse_delta: `tf.IndexedSlices` to be assigned to this variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  r   )r2   r   r  r   r  r    resource_scatter_updater9   r  r   r   r  rB   r  s       r:   scatter_updatez#BaseResourceVariable.scatter_update  s     lN$@$@A ;;G.J K K??!99KK  !!,"5"5tzzB		 r<   c           	          t        |t        j                        st        d|       | j	                  t        j                  | |j                  |j                  ||            S )a  Assigns `tf.IndexedSlices` to this variable batch-wise.

    Analogous to `batch_gather`. This assumes that this variable and the
    sparse_delta IndexedSlices have a series of leading dimensions that are the
    same for all of them, and the updates are performed on the last dimension of
    indices. In other words, the dimensions should be the following:

    `num_prefix_dims = sparse_delta.indices.ndims - 1`
    `batch_dim = num_prefix_dims + 1`
    `sparse_delta.updates.shape = sparse_delta.indices.shape + var.shape[
         batch_dim:]`

    where

    `sparse_delta.updates.shape[:num_prefix_dims]`
    `== sparse_delta.indices.shape[:num_prefix_dims]`
    `== var.shape[:num_prefix_dims]`

    And the operation performed can be expressed as:

    `var[i_1, ..., i_n,
         sparse_delta.indices[i_1, ..., i_n, j]] = sparse_delta.updates[
            i_1, ..., i_n, j]`

    When sparse_delta.indices is a 1D tensor, this operation is equivalent to
    `scatter_update`.

    To avoid this operation one can looping over the first `ndims` of the
    variable and using `scatter_update` on the subtensors that result of slicing
    the first dimension. This is a valid option for `ndims = 1`, but less
    efficient than this implementation.

    Args:
      sparse_delta: `tf.IndexedSlices` to be assigned to this variable.
      use_locking: If `True`, use locking during the operation.
      name: the name of the operation.

    Returns:
      The updated variable.

    Raises:
      TypeError: if `sparse_delta` is not an `IndexedSlices`.
    r  )r  rk   )	r2   r   r  r   r  r#   batch_scatter_updater  r  r  s       r:   r  z)BaseResourceVariable.batch_scatter_update  sn    X lN$@$@A ;;G.J K K??&&  #	 r<   c           
          | j                  t        j                  | j                  |t	        j
                  || j                        |            S )a  Applies sparse subtraction to individual values or slices in a Variable.

    `ref` is a `Tensor` with rank `P` and `indices` is a `Tensor` of rank `Q`.

    `indices` must be integer tensor, containing indices into `ref`.
    It must be shape `[d_0, ..., d_{Q-2}, K]` where `0 < K <= P`.

    The innermost dimension of `indices` (with length `K`) corresponds to
    indices into elements (if `K = P`) or slices (if `K < P`) along the `K`th
    dimension of `ref`.

    `updates` is `Tensor` of rank `Q-1+P-K` with shape:

    ```
    [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]].
    ```

    For example, say we want to add 4 scattered elements to a rank-1 tensor to
    8 elements. In Python, that update would look like this:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        indices = tf.constant([[4], [3], [1] ,[7]])
        updates = tf.constant([9, 10, 11, 12])
        op = ref.scatter_nd_sub(indices, updates)
        with tf.compat.v1.Session() as sess:
          print sess.run(op)
    ```

    The resulting update to ref would look like this:

        [1, -9, 3, -6, -6, 6, 7, -4]

    See `tf.scatter_nd` for more details about how to make updates to
    slices.

    Args:
      indices: The indices to be used in the operation.
      updates: The values to be used in the operation.
      name: the name of the operation.

    Returns:
      The updated variable.
    r   )r  r!   resource_scatter_nd_subr9   r   r   rB   r   r  updatesrk   s       r:   scatter_nd_subz#BaseResourceVariable.scatter_nd_sub<  E    Z ??--KK!!'4::6		 r<   c           
          | j                  t        j                  | j                  |t	        j
                  || j                        |            S )a  Applies sparse addition to individual values or slices in a Variable.

    `ref` is a `Tensor` with rank `P` and `indices` is a `Tensor` of rank `Q`.

    `indices` must be integer tensor, containing indices into `ref`.
    It must be shape `[d_0, ..., d_{Q-2}, K]` where `0 < K <= P`.

    The innermost dimension of `indices` (with length `K`) corresponds to
    indices into elements (if `K = P`) or slices (if `K < P`) along the `K`th
    dimension of `ref`.

    `updates` is a `Tensor` of rank `Q-1+P-K` with shape:

    ```
    [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]].
    ```

    For example, say we want to add 4 scattered elements to a rank-1 tensor
    with 8 elements. In Python, that update would look like this:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        indices = tf.constant([[4], [3], [1] ,[7]])
        updates = tf.constant([9, 10, 11, 12])
        add = ref.scatter_nd_add(indices, updates)
        with tf.compat.v1.Session() as sess:
          print(sess.run(add))
    ```

    The resulting update to ref would look like this:

        [1, 13, 3, 14, 14, 6, 7, 20]

    See `tf.scatter_nd` for more details about how to make updates to
    slices.
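
    Eagerly, the same update can be written directly; a minimal sketch reusing
    the values above:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        ref.scatter_nd_add(tf.constant([[4], [3], [1], [7]]),
                           tf.constant([9, 10, 11, 12]))
        # ref is now [1, 13, 3, 14, 14, 6, 7, 20]
    ```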

    Args:
      indices: The indices to be used in the operation.
      updates: The values to be used in the operation.
      name: the name of the operation.

    Returns:
      The updated variable.
    r   )r  r!   resource_scatter_nd_addr9   r   r   rB   r  s       r:   scatter_nd_addz#BaseResourceVariable.scatter_nd_addp  r  r<   c           
          | j                  t        j                  | j                  |t	        j
                  || j                        |            S )a  Applies sparse assignment to individual values or slices in a Variable.

    `ref` is a `Tensor` with rank `P` and `indices` is a `Tensor` of rank `Q`.

    `indices` must be an integer tensor, containing indices into `ref`.
    It must be of shape `[d_0, ..., d_{Q-2}, K]` where `0 < K <= P`.

    The innermost dimension of `indices` (with length `K`) corresponds to
    indices into elements (if `K = P`) or slices (if `K < P`) along the `K`th
    dimension of `ref`.

    `updates` is a `Tensor` of rank `Q-1+P-K` with shape:

    ```
    [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]].
    ```

    For example, say we want to assign 4 scattered values in a rank-1 tensor
    with 8 elements. In Python, that update would look like this:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        indices = tf.constant([[4], [3], [1] ,[7]])
        updates = tf.constant([9, 10, 11, 12])
        op = ref.scatter_nd_update(indices, updates)
        with tf.compat.v1.Session() as sess:
          print(sess.run(op))
    ```

    The resulting update to ref would look like this:

        [1, 11, 3, 10, 9, 6, 7, 12]

    See `tf.scatter_nd` for more details about how to make updates to
    slices.
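
    Eagerly, the same update can be written directly; a minimal sketch reusing
    the values above:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        ref.scatter_nd_update(tf.constant([[4], [3], [1], [7]]),
                              tf.constant([9, 10, 11, 12]))
        # ref is now [1, 11, 3, 10, 9, 6, 7, 12]
    ```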

    Args:
      indices: The indices to be used in the operation.
      updates: The values to be used in the operation.
      name: the name of the operation.

    Returns:
      The updated variable.
    r   )r  r!   resource_scatter_nd_updater9   r   r   rB   r  s       r:   scatter_nd_updatez&BaseResourceVariable.scatter_nd_update  sE    Z ??00KK!!'4::6		 r<   c           
          | j                  t        j                  | j                  |t	        j
                  || j                        |            S )av  Updates this variable with the max of `tf.IndexedSlices` and itself.

    `ref` is a `Tensor` with rank `P` and `indices` is a `Tensor` of rank `Q`.

    `indices` must be an integer tensor, containing indices into `ref`.
    It must be of shape `[d_0, ..., d_{Q-2}, K]` where `0 < K <= P`.

    The innermost dimension of `indices` (with length `K`) corresponds to
    indices into elements (if `K = P`) or slices (if `K < P`) along the `K`th
    dimension of `ref`.

    `updates` is a `Tensor` of rank `Q-1+P-K` with shape:

    ```
    [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]].
    ```

    See `tf.scatter_nd` for more details about how to make updates to
    slices.
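
    For example, a minimal eager-mode sketch (values are illustrative); only
    positions where the update exceeds the stored value change:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        ref.scatter_nd_max(tf.constant([[4], [3], [1], [7]]),
                           tf.constant([9, 3, 11, 5]))
        # ref is now [1, 11, 3, 4, 9, 6, 7, 8]
    ```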

    Args:
      indices: The indices to be used in the operation.
      updates: The values to be used in the operation.
      name: the name of the operation.

    Returns:
      The updated variable.
    r   )r  r!   resource_scatter_nd_maxr9   r   r   rB   r  s       r:   scatter_nd_maxz#BaseResourceVariable.scatter_nd_max  D    : ??--KK!!'4::6		 r<   c           
          | j                  t        j                  | j                  |t	        j
                  || j                        |            S )av  Updates this variable with the min of `tf.IndexedSlices` and itself.

    `ref` is a `Tensor` with rank `P` and `indices` is a `Tensor` of rank `Q`.

    `indices` must be an integer tensor, containing indices into `ref`.
    It must be of shape `[d_0, ..., d_{Q-2}, K]` where `0 < K <= P`.

    The innermost dimension of `indices` (with length `K`) corresponds to
    indices into elements (if `K = P`) or slices (if `K < P`) along the `K`th
    dimension of `ref`.

    `updates` is a `Tensor` of rank `Q-1+P-K` with shape:

    ```
    [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]].
    ```

    See `tf.scatter_nd` for more details about how to make updates to
    slices.
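
    For example, a minimal eager-mode sketch (values are illustrative); only
    positions where the update is smaller than the stored value change:

    ```python
        ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
        ref.scatter_nd_min(tf.constant([[4], [3], [1], [7]]),
                           tf.constant([9, 3, 11, 5]))
        # ref is now [1, 2, 3, 3, 5, 6, 7, 5]
    ```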

    Args:
      indices: The indices to be used in the operation.
      updates: The values to be used in the operation.
      name: the name of the operation.

    Returns:
      The updated variable.
    r   )r  r!   resource_scatter_nd_minr9   r   r   rB   r  s       r:   scatter_nd_minz#BaseResourceVariable.scatter_nd_min  r  r<   c                     t        | ||       y)a[  Writes additional information of the variable into the SavedObject proto.

    Subclasses of ResourceVariables could choose to override this method to
    customize extra information to provide when saving a SavedModel.

    Ideally, this should contain the logic in
    write_object_proto_for_resource_variable but `DistributedValue` is an
    outlier at the moment. Once `DistributedValue` becomes a proper
    ResourceVariable, we should remove the helper method below.

    Args:
      proto: `SavedObject` proto to update.
      options: A `SaveOption` instance that configures save behavior.
    N)(write_object_proto_for_resource_variable)r   protorP  s      r:   _write_object_protoz(BaseResourceVariable._write_object_proto   s     -T5'Br<   c                 f   t        | j                        5  | j                         5  | j                  t	        j
                  | j                  |||t        j                  || j                        |||||	|
            cd d d        cd d d        S # 1 sw Y   nxY wd d d        y # 1 sw Y   y xY w)Nr   )refbeginendstridesr   rk   
begin_maskend_maskellipsis_masknew_axis_maskshrink_axis_mask)	r   r9   r   r  r   resource_strided_slice_assignr   r   rB   )r   r  r  r  r   rk   r  r  r  r  r  s              r:   _strided_slice_assignz*BaseResourceVariable._strided_slice_assign1  s     
t{{	# 2T%>%>%@ 2__

5
5++))%tzzB#))/122 2 2 2 2 2 2s#   B'AB?	B'B	B''B0c                 P    t        | j                         j                               S r{   )complexr   r   r   s    r:   __complex__z BaseResourceVariable.__complex__C  s    4::<%%'((r<   c                 P    t        | j                         j                               S r{   )intr   r   r   s    r:   __int__zBaseResourceVariable.__int__F  s    tzz|!!#$$r<   c                 P    t        | j                         j                               S r{   )longr   r   r   s    r:   __long__zBaseResourceVariable.__long__I  s    

""$%%r<   c                 P    t        | j                         j                               S r{   )floatr   r   r   s    r:   	__float__zBaseResourceVariable.__float__L  s    ##%&&r<   c           	         ~|N|j                  | j                        s3t        d|j                   d| j                  j                   d|  d      |r'| j	                         j
                  j                  d   S | j                         S )Nz/Incompatible type conversion requested to type z for `tf.Variable of type z. (Variable: )r   )r  rB   r   rk   r   rg   inputsr   )r   rB   rk   as_refs       r:   _dense_var_to_tensorz)BaseResourceVariable._dense_var_to_tensorO  s    !9!9$**!E;EJJ< H""&**//!2-vQHI I __!!((++ZZ\r<   c                     t        d      )Nz`variable += value` with `tf.Variable`s is not supported. Use `variable.assign_add(value)` to modify the variable, or `out = variable + value` if you need to get a new output Tensor.r^   r   unused_others     r:   __iadd__zBaseResourceVariable.__iadd__Z      
 : ; ;r<   c                     t        d      )Nz`variable -= value` with `tf.Variable`s is not supported. Use `variable.assign_sub(value)` to modify the variable, or `out = variable * value` if you need to get a new output Tensor.r  r  s     r:   __isub__zBaseResourceVariable.__isub__`  r  r<   c                     t        d      )Nz`var *= value` with `tf.Variable`s is not supported. Use `var.assign(var * value)` to modify the variable, or `out = var * value` if you need to get a new output Tensor.r  r  s     r:   __imul__zBaseResourceVariable.__imul__f  r  r<   c                     t        d      Nz`var /= value` with `tf.Variable`s is not supported. Use `var.assign(var / value)` to modify the variable, or `out = var / value` if you need to get a new output Tensor.r  r  s     r:   __idiv__zBaseResourceVariable.__idiv__l  r  r<   c                     t        d      r  r  r  s     r:   __itruediv__z!BaseResourceVariable.__itruediv__r  r  r<   c                     t        d      r  r  r  s     r:   __irealdiv__z!BaseResourceVariable.__irealdiv__x  r  r<   c                     t        d      )Nz`var **= value` with `tf.Variable`s is not supported. Use `var.assign(var ** value)` to modify the variable, or `out = var ** value` if you need to get a new output Tensor.r  r  s     r:   __ipow__zBaseResourceVariable.__ipow__~  r  r<   )NNNNNNNNNNNNNNNNNNNTr{   )NNN)FNNTFNNNF)Yr   r   r   r   r   r   r   r   r   
contextlibcontextmanagerr   r   r   r   r  r	  propertyrB   r   rH   rk   rA   r  r  r  r  r9   r   r&  r(  r`   r   r   	Operationrg   r   r   r   r3  r   r*   r9  rJ  rR  r^  re  r$  r   rV  r  r  r  staticmethodr  __array_priority__rU  r  r  r  rE  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r  r
  r  r  r  r  r  r  r!  r#  r%  r   r<   r:   r   r   y  s   2
 +m ^&(8&+  >"#           02
      &         #--     ! !  5F d340 50.0. FJ+/.6p%% &%,\ > > N82#'R	;00220005n2h2h2h"H"HC"2$)%&'	;;;;;;;r<   r   c                       e Zd ZdZd Zd Zy)ResourceVariableGradientz6CompositeTensorGradient protocol for ResourceVariable.c                     |S )a  Returns the components of `value` that should be included in gradients.

    For a ResourceVariable, its gradient component is its handle tensor.
    For now, we return the ResourceVariable because the gradient infrastructure
    has special logic to handle ResourceVariables. We should remove the special
    logic and return the handle tensor.

    Args:
      value: A `ResourceVariable`.

    Returns:
      `value` itself.
    r   r  s     r:   get_gradient_componentsz0ResourceVariableGradient.get_gradient_components  s	     Lr<   c                     |S )a  Replaces the gradient components in `value` with `component_grads`.

    The gradient of a ResourceVariable is either None or a Tensor. So we don't
    need `value`'s TypeSpec or non-gradient components in this method.

    Args:
      value: A `ResourceVariable` with its gradient components compatible with
        `component_grads`.
      component_grads: A `Tensor` or None as the gradient result.

    Returns:
      The `component_grads`, which is either a `Tensor` or None.
    r   )r   r   component_gradss      r:   replace_gradient_componentsz4ResourceVariableGradient.replace_gradient_components  s
     r<   N)r   r   r   r   r2  r5  r   r<   r:   r0  r0    s    > r<   r0  c                        e Zd ZdZ	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 	 ddZed        Zd Z e       Z		 	 	 	 	 	 	 	 	 	 	 	 	 d	 fd	Z
	 	 d
dZ	 	 	 	 d fd	Z xZS )r   as	  Variable based on resource handles.

  See the [Variables How To](https://tensorflow.org/guide/variables)
  for a high level overview.

  A `ResourceVariable` allows you to maintain state across subsequent calls to
  session.run.

  The `ResourceVariable` constructor requires an initial value for the variable,
  which can be a `Tensor` of any type and shape. The initial value defines the
  type and shape of the variable. After construction, the type and shape of
  the variable are fixed. The value can be changed using one of the assign
  methods.

  Just like any `Tensor`, variables created with
  `tf.Variable(use_resource=True)` can be used as inputs for other Ops in the
  graph. Additionally, all the operators overloaded for the `Tensor` class are
  carried over to variables, so you can also add nodes to the graph by just
  doing arithmetic on variables.

  Unlike ref-based variable, a ResourceVariable has well-defined semantics. Each
  usage of a ResourceVariable in a TensorFlow graph adds a read_value operation
  to the graph. The Tensors returned by a read_value operation are guaranteed to
  see all modifications to the value of the variable which happen in any
  operation on which the read_value depends on (either directly, indirectly, or
  via a control dependency) and guaranteed to not see any modification to the
  value of the variable from operations that depend on the read_value operation.
  Updates from operations that have no dependency relationship to the read_value
  operation might or might not be visible to read_value.

  For example, if there is more than one assignment to a ResourceVariable in
  a single session.run call there is a well-defined value for each operation
  which uses the variable's value if the assignments and the read are connected
  by edges in the graph. Consider the following example, in which two writes
  can cause tf.Variable and tf.ResourceVariable to behave differently:

  ```python
  a = tf.Variable(1.0, use_resource=True)
  a.initializer.run()

  assign = a.assign(2.0)
  with tf.control_dependencies([assign]):
    b = a.read_value()
  with tf.control_dependencies([b]):
    other_assign = a.assign(3.0)
  with tf.control_dependencies([other_assign]):
    # Will print 2.0 because the value was read before other_assign ran. If
    # `a` was a tf.Variable instead, 2.0 or 3.0 could be printed.
    tf.compat.v1.Print(b, [b]).eval()
  ```
  c                    |rJ|t        d| d|       t        j                         rt        d|       | j                  ||	|       y|| j	                  ||||       y| j                  |||||||
||||||       y)a  Creates a variable.

    Args:
      initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
        which is the initial value for the Variable. Can also be a callable with
        no argument that returns the initial value when called. (Note that
        initializer functions from init_ops.py must first be bound to a shape
        before being used here.)
      trainable: If `True`, the default, also adds the variable to the graph
        collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
        the default list of variables to use by the `Optimizer` classes.
        Defaults to `True`, unless `synchronization` is set to `ON_READ`, in
        which case it defaults to `False`.
      collections: List of graph collections keys. The new variable is added to
        these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
      validate_shape: If `False`, allows the variable to be initialized with a
        value of unknown shape. If `True`, the default, the shape of
        `initial_value` must be known.
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading.  Defaults to the Variable's
        device.  If not `None`, caches on another device.  Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      name: Optional name for the variable. Defaults to `'Variable'` and gets
        uniquified automatically.
      dtype: If set, initial_value will be converted to the given type. If None,
        either the datatype will be kept (if initial_value is a Tensor) or
        float32 will be used (if it is a Python object convertible to a Tensor).
      variable_def: `VariableDef` protocol buffer. If not None, recreates the
        `ResourceVariable` object with its contents. `variable_def` and other
        arguments (except for import_scope) are mutually exclusive.
      import_scope: Optional `string`. Name scope to add to the
        ResourceVariable. Only used when `variable_def` is provided.
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      distribute_strategy: The tf.distribute.Strategy this variable is being
        created inside of.
      synchronization: Indicates when a distributed variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.
      shape: (optional) The shape of this variable. If None, the shape of
        `initial_value` will be used. When setting this argument to
        `tf.TensorShape(None)` (representing an unspecified shape), the variable
        can be assigned with values of different shapes.
      handle: (optional) The handle of a `tf.Variable`. If provided, only
        `trainable`, `shape`, `dtype`, and `handle` will be used to construct
        this `tf.Variable`.
      experimental_enable_variable_lifting: Whether to lift the variable out if
        it's in a `tf.function`. Default is `True`. When this argument
        is `True`, variable creation will follow the behavior and
        restrictions described
        [here](https://www.tensorflow.org/guide/function#creating_tfvariables).
        If this argument is `False`, that description doesn't apply,
        and you can freely create and use the variable in the
        `tf.function`, as if it's a "mutable `tf.Tensor`". You can't
        return the variable though.

    Raises:
      ValueError: If the initial value is not specified, or does not have a
        shape and `validate_shape` is `True`.

    @compatibility(eager)
    When Eager Execution is enabled, the default for the `collections` argument
    is `None`, which signifies that this `Variable` will not be added to any
    collections.
    @end_compatibility
    NzlThe variable_def and initial_value args to `tf.Variable` are mutually exclusive, but got both: variable_def=z,
initial_value=zwCreating a `tf.Variable` with a `variable_def` arg is not supported when eager execution is enabled. Got: variable_def=)r   r   r   rA   rB   r9   )r`   r   collectionsr   rk   rB   r   r   r   rA   r   r   r   )r   r   r|   _init_from_proto_init_from_handle_init_from_args)r   r`   r   r9  r   r   rk   rB   r   r   r   r   r   r   rA   r9   r   s                    r:   r   zResourceVariable.__init__  s    @ 		" ))5 7**7: ; 	; 
	"	"	$ ..:^= > 	> 
#'  ) 
	
y#(#($*  ,
 %!')!1'/S  r<   c                 ,    t         j                  |       S r{   )r   
from_valuer   s    r:   
_type_speczResourceVariable._type_spece  s    ""4((r<   c                 D    t        || j                  | j                        S r{   )r   rB   r   r  s     r:   _shape_invariant_to_type_specz.ResourceVariable._shape_invariant_to_type_specj  s    tzz4>>::r<   c                 f   t        j                  ||	||      \  }}	}|d}|t        d      t        |      }t	        |t
        j                        r4t        |d      r(|j                  j                  rt        d| d| d      |t        j                  j                  g}t	        |t        t        t        f      st        d| d	t!        |             |%t        |      st        d
t!        |       d|       |rCt        j                  j"                  |vr't        |      t        j                  j"                  gz   }t        j$                         5  t'        j(                          | _        ddd       |rt        j$                  }nt,        j.                  } |       5  t        j0                  |d|rg n|gd      5 }t        j2                  |      }| j*                  r|}|}nd|t        j4                         fz  }d}| j*                  rt        j6                  nt        j8                  }t;        j<                  t:        j<                  j?                  tA        jB                  d|z        g            }t        jD                         jG                  d|i      5  t        j0                  d      5   |d      5  |r |       }t	        |tH        jJ                        r7| jM                          |jN                  jP                  | _)        |jT                  }t        jV                  |d|      }ddd       ddd       |7|jX                  j[                  |      s(t        d|jX                   d| d      |jX                  }t]        ||||| j*                        }t_        j`                  |       |_1        |dz   |_2        ||_3        ddd       | j*                  r.|,|jh                  jk                         t        d| d| d      |jl                  jn                  }| j*                  rt        j0                  d      5  tq        jr                        }ddd       |t        j0                  d       5 }t        jt                  dd!      5  t        j6                  j6                        5  tq        jv                  |t        jx                  ||      |"      }ddd       ddd       ddd       t        j0                  d#      5  t        j6                  j6                        5  tq        jz                  ||      }t}        |||       ddd       }|Tt        jt                  dd!      5  t        j6                  |      5  t        j                  |      }ddd       ddd       nd}ddd       nbtq        jv                  |       d}d}d}|rBt        j6                  |      5  tq        jz                  ||      }t}        |||       ddd       nd}t_        j`                  |       |_A        | j*                  rt        j                  ||        nJt        j                  j                  |v r.t        j                  t        j                  j                  |        ddd       | j*                  r|nd}t        t        |   ||||||	|
||||$       ddd       y# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   KxY w# 1 sw Y   xY w# 1 sw Y   QxY w# 1 sw Y   VxY w# 1 sw Y   [xY w# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   ixY w# 1 sw Y   vxY w# 1 sw Y   xY w# 1 sw Y   yxY w)%a}  Creates a variable.

    Args:
      initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
        which is the initial value for the Variable. The initial value must have
        a shape specified unless `validate_shape` is set to False. Can also be a
        callable with no argument that returns the initial value when called.
        (Note that initializer functions from init_ops.py must first be bound to
        a shape before being used here.)
      trainable: If `True`, the default, also adds the variable to the graph
        collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
        the default list of variables to use by the `Optimizer` classes.
        Defaults to `True`, unless `synchronization` is set to `ON_READ`, in
        which case it defaults to `False`.
      collections: List of graph collections keys. The new variable is added to
        these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading.  Defaults to the Variable's
        device.  If not `None`, caches on another device.  Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      name: Optional name for the variable. Defaults to `'Variable'` and gets
        uniquified automatically.
      dtype: If set, initial_value will be converted to the given type. If None,
        either the datatype will be kept (if initial_value is a Tensor) or
        float32 will be used (if it is a Python object convertible to a Tensor).
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.
      distribute_strategy: DistributionStrategy under which this variable was
        created.
      shape: (optional) The shape of this variable. If None, the shape of
        `initial_value` will be used. When setting this argument to
        `tf.TensorShape(None)` (representing an unspecified shape), the variable
        can be assigned with values of different shapes.
      validate_shape: If `False`, allows the variable to be initialized with a
        value of unknown shape. If `True`, the default, the shape of
        `initial_value` must be known.
      experimental_enable_variable_lifting: Whether to lift the variable out if
        it's in a `tf.function`. Default is `True`. When this argument
        is `True`, variable creation will follow the behavior and
        restrictions described
        [here](https://www.tensorflow.org/guide/function#creating_tfvariables).
        If this argument is `False`, that description doesn't apply,
        and you can freely create and use the variable in the
        `tf.function`, as if it's a "mutable `tf.Tensor`". You can't
        return the variable though.

    Raises:
      ValueError: If the initial value is not specified, or does not have a
        shape and `validate_shape` is `True`.

    @compatibility(eager)
    When Eager Execution is enabled, variables are never added to collections.
    It is not implicitly added to the `GLOBAL_VARIABLES` or
    `TRAINABLE_VARIABLES` collections, and the `collections` argument is
    ignored.
    @end_compatibility
    NTzThe `initial_value` arg to `tf.Variable` must be specified except when you are not providing a `variable_def`. You provided neither.rH   zArgument `initial_value` (zS) could not be lifted out of a `tf.function`. (Tried to create variable with name='a8  '). To avoid this error, when constructing `tf.Variable`s inside of `tf.function` you can create the `initial_value` tensor in a `tf.init_scope` or pass a callable `initial_value` (e.g., `tf.Variable(lambda : tf.truncated_normal([10, 40]))`). Please file a feature request if this restriction inconveniences you.zPcollections argument to Variable constructor must be a list, tuple, or set. Got z	 of type zDArgument `constraint` must be None or a callable. a callable. Got a z:  VariableFskip_on_eager%s_%dzloc:@%srz  )list_classInitializerr`   )rk   rB   z;In this `tf.Variable` creation, the initial value's shape (zC) is not compatible with the explicitly supplied `shape` argument (z).)r`   rA   ri   rk   rN   r   z,The `initial_value` passed to `tf.Variable` z is from inside a control-flow  construct, such as a loop or conditional. When creating a `tf.Variable` inside a loop or conditional, use a lambda as the `initial_value`. Got: initial_value=(r  IsInitializedAssignr!  r   r  )r   rA   rB   r9   r   r   r   r   rk   r   r   r   r`   r   r   r   r   r   )Gr$   r   r   callabler2   r3   r4   r   rH   building_functionr   	GraphKeysGLOBAL_VARIABLESrG  r  settypeTRAINABLE_VARIABLESr   r   r|   r   r)  nullcontextr  name_from_scope_nameuidr   NullContextmanagerr   r  	ListValuer   as_bytesrm   _attr_scoper[  CheckpointInitialValue_maybe_initialize_trackablecheckpoint_positionrestore_uid_update_uidwrapped_valuer   rA   r  ry   weakrefr  _parent_trackable_namer   rg   _get_control_flow_contextrB   rx   r    r  r#  r   -_try_guard_against_uninitialized_dependenciesrn  r   r   rW  _cached_variableadd_to_collectionsGLOBAL_STEPsuperr   r   )r   r`   r   r9  r   rk   rB   r   r   r   r   rA   r   r   init_from_fnmaybe_init_scoper   ri   r   device_context_managerattrr9   r   nr   r   r   r   	__class__s                               r:   r<  z ResourceVariable._init_from_argsp  s   p 	@@[)T	; ,O[) ,3-1* ? @ @ M*L-!5!567w< $1$7$7$I$I3M? C??Cf E9
9 
: 
: ]]334kkD%#56  +}Id;6G5HJK K hz&: ,,0,<+=SN O O S]]66kI%)J)J(KKk		 < ' 9 9 ;;d<+#//		 Q>>

Lb}o {B #'..t4#+!) cggi 88)+
 --CJJ33I3I 	''))33??9{#:;< 4 >? ""$00(D1AB 	(~~m, B.DT.J B+om-)I)IJ..0!.!B!B!N!Nd+99m11O5BMB B  &&99%@#))* +??DgRIJ J
 "''E-)%,,.& &-[[%6&
"$t+&,'&
5	(8 M$=668D<TF C: ;H	KL L ##..~~o. I)??G I &) 	Q""4>	FMM*	
 ,>>MM/	 		 	 	 ~~f% " FMM* ;/@@Oe$UFE:; "M) $$T4@ ;ZZ/ ;!*!3!3E!:,;; ; "l'" "* $
6
6v}
M"
.-N+ B6GG%!l$UFLAB B
  L# +2++d*;,
' 
 
 d
3]]&&+5

 
 !:!:D
Aw{Bx (,':':mmd,)!1!%%'-#''% - }Q Q< <<B B B B	( 	(PI I
	 	 	 	 	 	; ;; ;; ;" "4B BS{B {BQ QsU  [2'^'C^$\:	\	A3[?6\	>B\A:^
\& ^?] ] 	7-\3$] 	,]4^ ^1$]	#^8]4	]'$]4	,^7<^3$^B^+>^'2[<?\	\	\\\#^&\0+^3\=8] 	 ]
]]^]$^']1,]4	4]>9^^^^^^$	 ^''^0c                    t        j                         rJ d| _        t        |t        j
                        sJ |j                  st        d|       t        j                         }|j                  t        j                  |j                  |      d      | _        t        j                  | j                  j                   j#                  d            | _        | j                  j&                  | _        | j(                  | _        |j                  t        j                  |j,                  |            | _        t1        |d      rB|j2                  r6|j                  t        j                  |j2                  |            | _        nd| _        t7        j8                  |j:                  |j<                  |j>                  |j                        \  }}}|| _         || _!        || _"        |jF                  r|j                  t        j                  |jF                  |            }|j                   jH                  d	k7  r|| _%        nd| _%        |j                   jH                  d	k7  r3|j                   jL                  d
   }|j                   jH                  d	k7  r3|| _'        n>d| _%        |jQ                  | j                  j                   j&                  dz         | _'        |jS                  d      r1t6        jT                  jW                  |jX                  |      | _-        nd| _-        d| _.        t_        j`                  | j                  j                   j#                  d            | _1        d| _2        || _3        y)z%Initializes from `VariableDef` proto.TzThe `variable_def` you passed to `tf.Variable` is Trying to restore a TF 1.x Reference Variable as a TF 2.x ResourceVariable. This is unsupported. Got variable_def=)r   F)allow_operationrA   r  Nr,   r   z/Read/ReadVariableOp:0r  )r  r   rB   )4r   r|   r   r2   r   r  r  r   r   rm   as_graph_elementprepend_name_scoper  r   r   TensorShaperg   get_attrr   rk   r   r   r  r   r   r  r   r$   r   r   r   r   r   r   r   r  rQ  r   r  r   get_tensor_by_nameHasFieldrC  SaveSliceInfor  r   r   r   rp   r   r   r   )	r   r   r   r   gr   r   r   snapshots	            r:   r:  z!ResourceVariable._init_from_proto  s    ((***DlL$<$<===## + ,8.: ; ; 	A%%&&\	C & DL **4<<??+C+CG+LMDK))D''DO--))	FGD 	23''..

 
 --LJKd !d@@((,*B*B""L$>$>	@ ,O[) ,D#DDO!!##

 
 ((|EFh 
		-	-%!KK 00;;%%a( KK 00$dd 001E1E1I2J Kd23'00>>*>># ? %d #dD//$,,//":":7"CDDKD)Dr<   c                 X   t        |      }|j                  s,t        j                  ||      }t        j                  ||       t        |d      r6t        |j                  t              r|j                  j                  d      }nd }t        |dd       }t        | 1  ||||||       y )Nrb  r   r   )r   rA   rB   r9   r   r   )r;   r]   r"   rt   set_handle_datar   r2   rb  r  rstripr{  rh  r   )	r   r   rA   rB   r9   rM   r   r   rn  s	           r:   r;  z"ResourceVariable._init_from_handle  s    
 -V4K %77uEk&&v{;vwJv||S$ALL''-kkd3I	G5f  6r<   )NNNTNNNNNNNNNNNN)NNNNNNNNNNNTN)NT)NNNN)r   r   r   r   r   r+  r?  rA  r0  __composite_gradient__r<  r:  r;  __classcell__rn  s   @r:   r   r     s    2l +/#BJ ) ); 45 +/Sn %)&*D*N #'""#	6 6r<   r   c                   8     e Zd ZdZ	 	 	 	 	 	 	 	 	 	 d fd	Z xZS )rD  zA variable with no initializer.c                    t        j                         5  t        j                          | _        t        j
                  |dd      5 }t        j                  |      }| j                  r|}|}nd|t        j                         fz  }d}t        ||||| j                  |	      }t        j                  |       |_        |dz   |_        ||_        | j                  rt        j
                  d      5  t        j                  |j                        5  t        j                   ||      }t#        |||       ddd       }ddd       t        j$                  t         j&                  j(                  |        nd}ddd       ddd       t+        t,        | ^  d
|
||||||| j                  d	| y# 1 sw Y   xY w# 1 sw Y   xY w# 1 sw Y   TxY w# 1 sw Y   XxY w)az  Creates the variable handle.

    Args:
      trainable: If `True`, GradientTapes automatically watch uses of this
        Variable.
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading.  Defaults to the Variable's
        device.  If not `None`, caches on another device.  Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      name: Optional name for the variable. Defaults to `'Variable'` and gets
        uniquified automatically.
      shape: The variable's shape.
      dtype: The variable's dtype.
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed a variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.
      extra_handle_data: Optional, another resource handle or Tensor with handle
        data to merge with `shape` and `dtype`.
      distribute_strategy: The tf.distribute.Strategy this variable is being
        created inside of.
    rC  FrD  rF  N)rA   rB   ri   rk   rN   r`   r   r  )r   rA   rB   r   r   r   r9   r   r   r   r   r   r   )r   r   r   r|   r   r  rT  rU  rv   r`  r  ra  rb  r   r   r    rn  r   add_to_collectionrN  rO  rh  rD  r   )r   r   r   rk   rA   rB   r   r   r   rb   r   r   r   ri   r   r9   r   r   rn  s                     r:   r   zUninitializedVariable.__init__  s   ^ 
	 % !( 9 9 ;;d>>$
%@ !D..t4#+!)cggi 88)+6#**+- $+;;t#4 "T)%~~f% " FMM* ;/@@Oe$UFE:; "M" 

 > >
E
 -C!	%L 

/ </#'))< .;<; ;" "+! !	% %sT   3G	B)G2 G $F4	6
G  9G9G4F=9G  G	GG	GG!)
NNNNNNNNNN)r   r   r   r   r   r~  r  s   @r:   rD  rD    s2    ' a< a<r<   rD  c                 *    | j                  |||      S )N)rB   rk   r  )r  )varrB   rk   r  s       r:   r  r  I	  s    		!	!D	!	HHr<   c                   .    e Zd ZdZ fdZed        Zd Zd Zd Z	d fd	Z
d fd	Zd fd		Zd fd
	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zd fd	Zedej6                  fd       Z xZS )r  z^Represents a future for a read of a variable.

  Pretends to be the tensor if anyone looks.
  c                    t        |t        j                        rd}n|j                  }t	        j
                         st        j                         rd }nBt        j                  |g      5  t        j                  ||      }t        |||       d d d        t        t        | 7  |||||       || _        y # 1 sw Y   *xY w)Nre   )r9   rA   r   r   rB   r   )r2   r   r5   rk   r   r|   inside_functionr   r    rn  r   rh  r  r   
_parent_op)
r   r9   rB   rA   r   r  r   r   r   rn  s
            r:   r   z_UnreadVariable.__init__Y	  s    &#//*kKKk   "c&9&9&;m##YK0 =1BBEufm<= 
/4)# * %  DO= =s   ,$B99Cc                 H    | j                   r| j                  j                  S y)NUnreadVariable)r   r  rk   r   s    r:   rk   z_UnreadVariable.nameq	  s    __!!!r<   c                 "    | j                         S r{   r$  r   s    r:   r   z_UnreadVariable.valuex	      !!##r<   c                 "    | j                         S r{   r  r   s    r:   r   z_UnreadVariable.read_value{	  r  r<   c                    t        j                  | j                  g      5  t        j                  | j
                  | j                        }t        | j                  | j
                  |       |cd d d        S # 1 sw Y   y xY wr{   )r   r   r  r    rn  r   r   r   )r   rp  s     r:   r$  z!_UnreadVariable._read_variable_op~	  s`    		!	!4??"3	4 (99
,,%fT[[$,,?	  s   AA88Bc                     t        j                  | j                  g      5  t        t        |   ||||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r   r  r  rk   r   rn  s        r:   r  z_UnreadVariable.assign_sub	  J    		!	!4??"3	4 A?D4UK5?AA A A   AAc                     t        j                  | j                  g      5  t        t        |   ||||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s        r:   r  z_UnreadVariable.assign_add	  r  r  c                     t        j                  | j                  g      5  t        t        |   ||||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  rE  )r   r   r  rk   r   rn  s        r:   rE  z_UnreadVariable.assign	  sE    		!	!4??"3	4 =?D0T1;== = =r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r   r  r  rk   rn  s       r:   r  z_UnreadVariable.scatter_sub	  C    		!	!4??"3	4 <?D5lK6:<< < <   AA
c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_add	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_max	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_min	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_mul	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_div	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_update	  sH    		!	!4??"3	4 I?((4k4HI I Ir  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z$_UnreadVariable.batch_scatter_update	  sH    		!	!4??"3	4 O?..:KNO O Or  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r   r  r  rk   rn  s       r:   r  z_UnreadVariable.scatter_nd_sub	  D    		!	!4??"3	4 Q?D8'4PQ Q Qr  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_nd_add	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z!_UnreadVariable.scatter_nd_update	  sH    		!	
!4??"3	4 C?++2GTBC C Cr  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_nd_max	  r  r  c                     t        j                  | j                  g      5  t        t        |   |||      cd d d        S # 1 sw Y   y xY wr{   )r   r   r  rh  r  r  r  s       r:   r  z_UnreadVariable.scatter_nd_min	  r  r  r+  c                     | j                   S r-  )r  r   s    r:   rg   z_UnreadVariable.op	  s     ??r<   r&  r'  r{   )r   r   r   r   r   r+  rk   r   r   r$  r  r  rE  r  r  r  r  r  r  r  r  r  r  r  r  r  r   r,  rg   r~  r  s   @r:   r  r  S	  s    
 0  $$A
A
=
<
<
<
<
<
<
I
O
QQC
QQ #--  r<   r  c                     |S )zGradient for read op.r   )_grads     r:   	_ReadGradr  	  s	     
+r<   c                 ,   |Mt        j                         j                  j                         rt        j
                  }nt        j                  }t        |       }||j                  st        j                  | |      S |j                  d   j                  }|j                  st        d |j                  D              rt        j                  | |      S t!        j"                  |j                  D cg c]  }|j$                   c}|      S c c}w )aP  Returns the shape of the variable from the handle.

  If the output shape dtype is not specified, it will be set to int64 if
  tf_shape_default_int64 is enabled, otherwise it will be set to int32.

  Args:
    handle: The handle of the variable.
    out_type: The dtype of the output shape.

  Returns:
    The shape of the variable.
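
  For example, a rough sketch (the exact shape dtype depends on the flag
  described above):

  ```python
  v = tf.Variable(tf.zeros([3, 5]))
  variable_shape(v.handle)  # ==> [3, 5]
  ```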
  )out_typer   c              3   :   K   | ]  }|j                   d k(    yw)r>   N)rF   ).0rY  s     r:   	<genexpr>z!variable_shape.<locals>.<genexpr>	  s     $KaQVVr\$Ks   r   )r   configtf_shape_default_int64r   r   int64int32r;   r]   r    variable_shaper@   rA   rC   anyrE   r   constantrF   )r9   r  rM   shape_protorY  s        r:   r  r  	  s     ||~,,224hh*62+ 2 2$33FXNN**1-33+$K;??$K!K$33FXNN			{?!qvv?x	PP?s   5Dc                 f   | j                   d   }| j                   d   }t        |      }t        j                  t        j                  |      d      }t        j
                  ||dd gd      }t        j                  ||      }t        j                  ||      }t        j                  |||      dfS )zGradient for gather op.r   rX   N)	r  r  r   expand_dimsrF   concatreshaper   r  )rg   r  r9   r  params_shaperF   values_shaper  s           r:   _GatherGradr  	  s     99Q<&IIaL'',			y~~g6	:$!!4ab)9":A>,T<0&gt,'

&
&vw
Et	LLr<   z%__internal__.ops.is_resource_variable)v1c                 >    t        | t              xs t        | d      S )z>"Returns True if `var` is to be considered a ResourceVariable. _should_act_as_resource_variable)r2   r   r   r  s    r:   is_resource_variabler  
  s&     
C-	. 
/'	-3/ /r<   c           
          t        | j                  | j                  | j                  | j                  | j
                  | j                  | j                  | j                        }|j                          |S )z@Copies an existing variable to a new graph, with no initializer.)r   r   rA   rB   rk   r   r   rb   )
rD  r   r   rA   rB   r  r   r   r9   r[  )r  rQ  s     r:   rN  rN  
  s^    
 'IIII))//

$, **,	r<   AssertVarIsInitializedOpc                       e Zd Zy)StructurePatternN)r   r   r   r   r<   r:   r  r  '
  s    r<   r  c                   "     e Zd ZdZ fdZ xZS )PLeafz-Represents a singleton leaf StructurePattern.c                 \    t        | d      st        | 	  |       | _        | j                  S )Ninstance)r   rh  __new__r  )clsrn  s    r:   r  zPLeaf.__new__.
  s'    3
#W_S)cl<<r<   )r   r   r   r   r  r~  r  s   @r:   r  r  +
  s    5 r<   r  c                       e Zd ZdZd Zd Zy)PListz'Represents a list of StructurePatterns.c                 $    t        |      | _        y r{   )rG  
components)r   r  s     r:   r   zPList.__init__7
  s    :&DOr<   c                 X    t        |t              xr | j                  |j                  k(  S r{   )r2   r  r  r   others     r:   __eq__zPList.__eq__:
  s#    eU#K5;K;K(KKr<   N)r   r   r   r   r   r  r   r<   r:   r  r  4
  s    /'Lr<   r  c                        e Zd ZdZddgZ ed       Zej                  ddf fd	Z	d Z
ed	        Zd
 Zd Zed        Zd Z fdZ fdZd Zd Zd Zd Zd Zd Zd Z xZS )r   zDescribes a tf.Variable.

  A `VariableSpec` provides metadata describing the `tf.Variable` objects
  accepted or returned by TensorFlow 2.x APIs.
  r   r   c                     t         S r{   )r   r   s    r:   rt  zVariableSpec.<lambda>G
  s    %5 r<   TNc                 L    t         t        |   ||       || _        || _        y )Nr   )rh  r   r   r   r   )r   rA   rB   r   r   rn  s        r:   r   zVariableSpec.__init__I
  s%    	,&uE&:DNDMr<   c                 d   t        |t        |       | j                  f      sy| j                  j	                  |j                        xr4 | j
                  |j
                  k(  xr | j                  |j                  k(  }t        |t        |             r|xr | j                  |j                  k(  S |S )a  Returns True if `spec_or_value` is compatible with this `VariableSpec`.

    `spec_or_value` is considered to be compatible with this `VariableSpec` if

    * `spec_or_value` is a `Variable` or `VariableSpec`,
    * their shapes are compatible,
    * their dtypes are the same,
    * they are both trainable or not trainable.
    * they share the same alias_id if `spec_or_value` is a `VariableSpec`.

    Example:

    >>> v = tf.Variable([1., 2., 3.])
    >>> spec = VariableSpec([None])
    >>> spec.is_compatible_with(v)
    True
    >>> v = tf.Variable(1)
    >>> spec.is_compatible_with(v)
    False

    Args:
      spec_or_value: A VariableSpec or Variable to compare against.

    Returns:
      True if `spec_or_value` is compatible with this `VariableSpec`.
    F)r2   rQ  
value_typerA   r  rB   r   r   )r   spec_or_value
compatibles      r:   r  zVariableSpec.is_compatible_withO
  s    6 md4j$//%BC**//0C0CD <** 3 33<..M$;$;;  -d,CDMM]-C-CCCr<   c                 T     | |j                   |j                  |j                        S )a  Creates a `VariableSpec` from the given `Variable`.

    `value`'s shape, dtype, and trainable attributes will be used to create
    the new `VariableSpec`.

    Example:

    >>> v = tf.Variable([1., 2., 3.])
    >>> VariableSpec.from_value(v)
    VariableSpec(shape=(3,), dtype=tf.float32, trainable=True, alias_id=None)

    Args:
      value: A Variable.

    Returns:
      A `VariableSpec` created from `value`.
    )rB   r   )rA   rB   r   )r  r   s     r:   r>  zVariableSpec.from_valuet
  s    & u{{%++IIr<   c                     |j                   gS r{   r8   r  s     r:   _to_componentszVariableSpec._to_components
  s    LL>r<   c                 v   t        |t        t        f      st        d| d      t	        |      dk7  rt        d| d      |d   }t        |t        j                        r|j                  t        j                  k7  rt        d| d      t        | j                  | j                  | j                  |      S )Nz?Components of a ResourceVariable must be a list or tuple, got fz	 instead.rX   zMComponents of a ResourceVariable must only contain its resource handle, got fr   z@The handle of a ResourceVariable must be a resource tensor, got r8  )r2   rG  r  r   rD   r   r3   r4   rB   r   r[   r   r   rA   )r   r  r9   s      r:   _from_componentszVariableSpec._from_components
  s    j4-0 %%/L	; < <
:! 44><yJ K K]F$$&)/)H &&,XY8 9 9dnn"&**"&**#)+ +r<   c           	          t        j                  g t        j                  t        j                  j

  @property
  def _component_specs(self):
    # The only flat component is the scalar resource handle; its dtype carries
    # handle data describing the variable's dtype and shape.
    handle_data = cpp_shape_inference_pb2.CppShapeInferenceResult.HandleData()
    handle_data.is_set = True
    handle_data.shape_and_type.append(
        cpp_shape_inference_pb2.CppShapeInferenceResult.HandleShapeAndType(
            shape=self.shape.as_proto(),
            dtype=self.dtype.as_datatype_enum))
    return [
        tensor_module.TensorSpec(
            [],
            dtypes.DType(dtypes.resource._type_enum, handle_data)),  # pylint: disable=protected-access
    ]

  def _serialize(self):
    return (self.shape, self.dtype, self.trainable, self.alias_id)

  def is_subtype_of(self, other):
    if type(self) is not type(other):
      return False

    # When neither side carries an alias_id, defer to the base implementation;
    # mixing an alias_id with None is not supported.
    if self.alias_id is None and other.alias_id is None:
      return super().is_subtype_of(other)

    if self.alias_id is None or other.alias_id is None:
      raise NotImplementedError(
          f"VariableSpec.is_subtype_of doesn't support alias_id=None, "
          f"got self: {self} and other: {other}.")

    return super().is_subtype_of(other)

  def most_specific_common_supertype(self, others):
    if any(type(self) is not type(other) for other in others):
      return None

    if self.alias_id is None and all(
        other.alias_id is None for other in others):
      return super().most_specific_common_supertype(others)

    if self.alias_id is None or any(
        other.alias_id is None for other in others):
      raise NotImplementedError(
          f"VariableSpec.most_specific_common_supertype doesn't support "
          f"alias_id=None, got self: {self} and others: {others}.")

    return super().most_specific_common_supertype(others)

  def placeholder_value(self, placeholder_context):
    if placeholder_context.unnest_only:
      return self

    name = self.name or placeholder_context.naming_scope
    context_graph = placeholder_context.context_graph
    if placeholder_context.has_placeholder(self.alias_id):
      # Reuse the placeholder already created for this alias_id.
      placeholder = placeholder_context.get_placeholder(self.alias_id)
    else:
      spec = tensor_module.TensorSpec([], dtypes.resource)
      spec_context = trace_type.InternalPlaceholderContext(
          context_graph.outer_graph)
      spec_context.update_naming_scope(name)
      placeholder = spec.placeholder_value(spec_context)
      if self.alias_id is not None:
        placeholder_context.add_placeholder(self.alias_id, placeholder)
    # Capture the resource-handle placeholder in the current graph and record
    # the user-specified name on the capturing op.
    placeholder = context_graph.capture(placeholder, name=name)
    placeholder.op._set_attr(  # pylint: disable=protected-access
        "_user_specified_name",
        attr_value_pb2.AttrValue(s=compat.as_bytes(name)))
    return placeholder

  def to_tensors(self, value):
    assert isinstance(value, BaseResourceVariable), type(value)
    return [value.handle]

  def cast(self, value, casting_context):
    assert isinstance(value, BaseResourceVariable)
    return value
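
  # Illustrative sketch, not part of the original TensorFlow source: the
  # subtype/supertype overrides above refine or relax shapes when no alias_id
  # is involved. The helper name `_demo_subtype_and_supertype` is an assumption
  # made purely for demonstration.
  @staticmethod
  def _demo_subtype_and_supertype():
    specific = VariableSpec(shape=[3], dtype=dtypes.float32)
    relaxed = VariableSpec(shape=[None], dtype=dtypes.float32)

    # With both alias_ids left as None, the base TypeSpec behavior applies: a
    # fully known shape is a subtype of the same spec with an unknown dim.
    assert specific.is_subtype_of(relaxed)
    assert not relaxed.is_subtype_of(specific)

    # The most specific common supertype of shapes [3] and [5] relaxes the
    # mismatched dimension to None.
    other = VariableSpec(shape=[5], dtype=dtypes.float32)
    supertype = specific.most_specific_common_supertype([other])
    assert supertype.shape.as_list() == [None]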
  s    e1222Lr<   c                 ^    t        t               t               t               t                     S r{   )r  r  r   s    r:   _get_structurezVariableSpec._get_structure
  s    %'57EG44r<   c           

  def __repr__(self):
    return (f"{type(self).__name__}(shape={self.shape}, dtype={self.dtype!r}, "
            f"trainable={self.trainable!r}, alias_id={self.alias_id!r})")

  def __hash__(self):
    return hash((self.shape, self.dtype, self.trainable, self.alias_id))

  def __eq__(self, other):
    return (type(self) is type(other) and
            self.shape == other.shape and
            self.dtype == other.dtype and
            self.trainable == other.trainable and
            self.alias_id == other.alias_id)


def write_object_proto_for_resource_variable(
    resource_variable, proto, options, enforce_naming=True):
  """Writes additional information of the variable into the SavedObject proto.

  This allows users to define a `hook` to provide extra information of the
  variable to the SavedObject.

  For example, DistributedVariable class would fill in components in the
  distributed context.

  Args:
    resource_variable: A `ResourceVariable` or `DistributedValue` that has the
      information to be saved into the proto.
    proto: `SavedObject` proto to update.
    options: A `SaveOption` instance that configures save behavior.
    enforce_naming: A bool determining whether to check that names end in the
      expected string ':0'
  """
  proto.variable.SetInParent()
  if enforce_naming and not resource_variable.name.endswith(":0"):
    raise ValueError(f"Cowardly refusing to save variable "
                     f"{resource_variable.name} because of unexpected suffix "
                     f"in the name (expected ':0') which won't be restored.")
  proto.variable.name = tensor_module.get_op_name(resource_variable.name)
  proto.variable.trainable = resource_variable.trainable
  proto.variable.dtype = resource_variable.dtype.as_datatype_enum
  proto.variable.synchronization = resource_variable.synchronization.value
  proto.variable.aggregation = resource_variable.aggregation.value
  proto.variable.shape.CopyFrom(resource_variable.shape.as_proto())
  if options.experimental_variable_policy._save_variable_devices(  # pylint: disable=protected-access
  ) and hasattr(resource_variable, "device"):
    proto.variable.device = resource_variable.device
:)../ 001 2 2 &112C2H2HI%...88%..*00AA%..#4#D#D#J#J%.. 0<<BB%.... 1 7 7 @ @ BC))@@  (+/66enn ,r<   r  r+  c                 "    | j                         S )z6Returns the XLA sharding associated with the variable.)r   r  s    r:   get_xla_shardingr  4  s    				  r<   r{   r(  )T)pr   r)  r  typingr   r`  abslr   tensorflow.compiler.tf2xla.opsr   tensorflow.core.configr   tensorflow.core.frameworkr   r   tensorflow.core.functionr	   tensorflow.core.protobufr


# Allow `VariableSpec` to be encoded to and decoded from `TypeSpecProto`
# messages (used, e.g., when saving `tf.function` signatures).
nested_structure_coder.register_codec(
    nested_structure_coder.BuiltInTypeSpecCodec(
        VariableSpec, struct_pb2.TypeSpecProto.VARIABLE_SPEC
    )
)
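

# Illustrative sketch, not part of the original TensorFlow source: the spec of
# an existing variable can be recovered via `tf.type_spec_from_value`, and two
# independently built, identical specs compare equal. The helper name
# `_demo_spec_from_value` and the local import are assumptions made purely for
# demonstration.
def _demo_spec_from_value():
  import tensorflow as tf  # Local import; TF is fully loaded at call time.

  v = tf.Variable([[1.0, 2.0]])
  spec = tf.type_spec_from_value(v)
  assert isinstance(spec, VariableSpec)
  assert spec.shape.as_list() == [1, 2]
  assert spec.dtype == dtypes.float32
  assert spec.trainable

  assert spec == VariableSpec(shape=[1, 2], dtype=dtypes.float32)
  return spec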