
"""Prototype decorator for defining legacy-graph-mode functions."""

import weakref

from tensorflow.core.function.polymorphism import function_type as function_type_lib
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import struct_pb2
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.eager import lift_to_graph
from tensorflow.python.eager.polymorphic_function import atomic_function
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor as tensor_lib
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import nested_structure_coder
from tensorflow.python.trackable import data_structures
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export


class VariableHolder(object):
  """Holds variables for a python function."""

  def __init__(self, fn=None, share_variables=False):
    self._fn = fn
    self._share_variables = share_variables
    self._variables_by_name = data_structures.Mapping()

  @property
  def variables(self):
    return self._variables_by_name

  def variable_creator_scope(self, next_creator, **kwargs):
    """Creates variables & adds them to collections to match legacy code."""
    collections = kwargs.pop("collections", None)
    v = None

    # Get the expected variable name.
    with ops.name_scope(
        kwargs.get("name", None), "Variable", skip_on_eager=False) as name:
      variable_name = ops.name_from_scope_name(name)
      kwargs["name"] = name

    if self._share_variables:
      v = self._variables_by_name.get(variable_name, None)

    if v is None:
      v = next_creator(**kwargs)
      self._variables_by_name[variable_name] = v

    if collections is None:
      collections = [ops.GraphKeys.GLOBAL_VARIABLES]
    if v.trainable and ops.GraphKeys.TRAINABLE_VARIABLES not in collections:
      collections = list(collections) + [ops.GraphKeys.TRAINABLE_VARIABLES]

    ops.add_to_collections(collections, v)

    return v

  def __call__(self, *args, **kwargs):
    return self.call_with_variable_creator_scope(self._fn)(*args, **kwargs)

  def call_with_variable_creator_scope(self, fn):

    def wrapped(*args, **kwargs):
      with variable_scope.variable_creator_scope(self.variable_creator_scope):
        return fn(*args, **kwargs)

    return wrapped


def _get_element_from_tensor_info(tensor_info, graph):
  """Simplified copy of the deprecated `get_tensor_from_tensor_info`."""
  encoding = tensor_info.WhichOneof("encoding")
  if encoding == "name":
    # We may get operations here, some of which may have been pruned out.
    return graph.as_graph_element(tensor_info.name)
  elif encoding == "coo_sparse":
    return sparse_tensor.SparseTensor(
        graph.get_tensor_by_name(tensor_info.coo_sparse.indices_tensor_name),
        graph.get_tensor_by_name(tensor_info.coo_sparse.values_tensor_name),
        graph.get_tensor_by_name(
            tensor_info.coo_sparse.dense_shape_tensor_name))
  elif encoding == "composite_tensor":
    spec_proto = struct_pb2.StructuredValue(
        type_spec_value=tensor_info.composite_tensor.type_spec)
    spec = nested_structure_coder.decode_proto(spec_proto)
    components = [
        graph.get_tensor_by_name(component.name)
        for component in tensor_info.composite_tensor.components
    ]
    return spec._from_components(components)  # pylint: disable=protected-access
  else:
    raise ValueError(f"Invalid TensorInfo.encoding: {encoding}. Valid "
                     "encodings are 'name', 'coo_sparse', and "
                     "'composite_tensor'.")


def _lift_single_variable(old_variable, graph, variable_holder):
  """Lifts `old_variable` out of the `FuncGraph` `graph`."""
  new_variable = resource_variable_ops.UninitializedVariable(
      shape=old_variable.shape,
      dtype=old_variable.dtype,
      name=old_variable.op.name,
      trainable=old_variable.trainable,
      extra_handle_data=old_variable.handle)
  new_variable._initializer_op = old_variable._initializer_op  # pylint: disable=protected-access
  graph.add_capture(new_variable.handle, old_variable.handle)
  # Now that the new variable is in graph.captures, graph.capture will use the
  # cached value and do some post-processing on the capture, like recording it
  # on the tape.
  graph.capture(new_variable.handle)

  # pylint: disable=protected-access
  variable_name = new_variable.name.split(":")[0]
  variable_holder._variables_by_name[variable_name] = new_variable
  graph._weak_variables.append(weakref.ref(new_variable))
  # pylint: enable=protected-access
  graph.watch_variable(new_variable)
  return new_variable


def _lift_unlifted_variables(graph, variable_holder):
  """Finds resource variables and lifts them into the outer context.

  When we import a GraphDef inside a wrap_function, no Python graph building
  code runs. This means we get VarHandleOps which create variable resources,
  but no corresponding Python objects. Leaving them like this works but gives
  the user no way to interact with or modify the variables outside the graph.

  This method searches for variables and lifts them out as regular variable
  objects when possible, indicating to the FuncGraph that they are captures.

  Args:
    graph: The FuncGraph to lift variables from.
    variable_holder: A VariableHolder to record the lifted variables in.
  """
  with graph.as_default():
    global_collection_variables = ops.get_collection(
        ops.GraphKeys.GLOBAL_VARIABLES)
    local_collection_variables = ops.get_collection(
        ops.GraphKeys.LOCAL_VARIABLES)
    existing_captures = {id(c) for c in graph.internal_captures}
    lifted_variables = {}

    def _should_lift_variable(v):
      return ((v._in_graph_mode  # pylint: disable=protected-access
               and v.graph.building_function)
              and isinstance(v, resource_variable_ops.BaseResourceVariable)
              and id(v.handle) not in existing_captures)

    for old_variable in global_collection_variables:
      if _should_lift_variable(old_variable):
        new_variable = _lift_single_variable(old_variable, graph,
                                             variable_holder)
        lifted_variables[id(old_variable)] = new_variable
        existing_captures.add(id(old_variable.handle))

    for old_variable in local_collection_variables:
      if _should_lift_variable(old_variable):
        new_variable = _lift_single_variable(old_variable, graph,
                                             variable_holder)
        lifted_variables[id(old_variable)] = new_variable
        existing_captures.add(id(old_variable.handle))
        if new_variable._in_graph_mode:  # pylint: disable=protected-access
          outer_graph = new_variable.graph
          # Variables are added to the global collection by default. This
          # variable should only appear in the local collection, so remove it
          # from the global one.
          global_collection = outer_graph.get_collection_ref(
              ops.GraphKeys.GLOBAL_VARIABLES)
          global_collection.remove(new_variable)
          outer_graph.add_to_collection(ops.GraphKeys.LOCAL_VARIABLES,
                                        new_variable)

    # Update the FuncGraph's collections, partly for the user and partly so
    # this function is idempotent when it runs again in prune() calls.
    for collection_name in [
        ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.LOCAL_VARIABLES
    ]:
      mutable_collection = ops.get_collection_ref(collection_name)
      for index, current in enumerate(mutable_collection):
        mutable_collection[index] = lifted_variables.get(id(current), current)
        if not resource_variable_ops.is_resource_variable(
            mutable_collection[index]):
          logging.log_first_n(
              logging.WARN,
              "Unable to create a python object for variable {} because it is "
              "a reference variable. It may not be visible to training APIs. "
              "If this is a problem, consider rebuilding the SavedModel after "
              "running tf.compat.v1.enable_resource_variables().".format(
                  mutable_collection[index]), 5)


class WrappedFunction(function.ConcreteFunction):
  """Wraps a tf V1 piece of code in a function."""

  def __init__(self, fn_graph, variable_holder, attrs=None, signature=None,
               are_keyword_args_also_positional=False):
    self._variable_holder = variable_holder
    _lift_unlifted_variables(fn_graph, variable_holder)
    # We call __init__ after lifting variables so that the function's
    # signature properly reflects the new captured inputs.
    for f in fn_graph.as_graph_def(use_pybind11_proto=True).library.function:
      context.context().add_function_def(f)
    self._signature = signature
    function_type = function_type_lib.from_structured_signature(
        fn_graph.structured_input_signature,
        fn_graph.structured_outputs,
        fn_graph.function_captures.capture_types,
        are_keyword_args_also_positional=are_keyword_args_also_positional,
    )
    atomic_fn = atomic_function.from_func_graph(
        function._inference_name(fn_graph.name), fn_graph, attrs, function_type
    )
    super().__init__(atomic_fn)

  def _call_impl(self, args, kwargs):
    if self._arg_keywords is None:
      if kwargs:
        raise NotImplementedError(
            "Keyword arguments are not supported when calling a "
            f"wrap_function-decorated function. Got {kwargs}.")
      if self._signature is not None:
        args = list(args)
        for i, arg in enumerate(args):
          if isinstance(self._signature[i], tensor_lib.DenseSpec):
            args[i] = ops.convert_to_tensor(arg, self._signature[i].dtype)
      return self._call_flat(args, self.captured_inputs)
    else:
      return super()._call_impl(args, kwargs)

  def prune(self, feeds, fetches, name=None, input_signature=None,
            are_keyword_args_also_positional=False):
    """Extract a subgraph of this function's underlying graph.

    Wraps the subgraph in a new `WrappedFunction` object.

    Args:
      feeds: Input tensors to the subgraph to extract, as `Tensor` objects.
      fetches: Possibly-nested Python data structure containing information
        about outputs of the target subgraph. Each entry can either be a
        `Tensor` object (for data outputs), an `Operation` object (for control
        outputs), or a `TensorInfo` proto. Any additional shape/dtype
        information provided in a `TensorInfo` and not present in the original
        graph will be added to the returned subgraph.
      name: (optional) Name to give to the underlying `FuncGraph` of the
        returned object. If no name is provided, the graph's name will be
        `"pruned"`.
      input_signature: (optional) possibly-nested Python data structure
        containing `TensorSpec` objects, with which to populate the returned
        functions's `FuncGraph`'s `structured_input_signature` field.
      are_keyword_args_also_positional: whether the keyword arguments
        in `input_signature` are `POSITIONAL_OR_KEYWORD` arguments. If
        `False` (default), they are treated as `KEYWORD_ONLY`
        arguments.
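
    Example:

      A minimal sketch of pruning a wrapped graph down to a single feed and
      fetch. The graph-building function `build_graph_fn` and the tensor
      names `"x:0"` and `"y:0"` are hypothetical and depend on the graph
      being wrapped:

      ```python
      wrapped = tf.compat.v1.wrap_function(build_graph_fn, signature=[])
      pruned = wrapped.prune(
          feeds=wrapped.graph.get_tensor_by_name("x:0"),
          fetches=wrapped.graph.get_tensor_by_name("y:0"))
      result = pruned(tf.constant(2.0))
      ```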

    Returns:
      A new `WrappedFunction` object containing a copy of the portion of this
        object's graph that goes from `feeds` to `fetches`.
    """
    name = name or "pruned"
    flat_feeds = nest.flatten(feeds, expand_composites=True)
    flat_feeds = [self.graph.as_graph_element(t) for t in flat_feeds]
    for f in flat_feeds:
      if not isinstance(f, ops.Tensor):
        raise ValueError("All members of argument `feeds` must be tensors. "
                         f"Got {f} with type {type(f)}.")

    # Ignoring all feeds that are captures allows prune to be called
    # using wrapped_func.inputs even when it also contains captures.
    internal_captures = {id(c) for c in self.graph.internal_captures}
    flat_feeds = [f for f in flat_feeds if id(f) not in internal_captures]

    operation_fetches = []
    tensor_fetches = []
    tensor_infos = []

    def _fetch_preprocessing_callback(fetch):
      """Extract out lists of ops, tensors, and tensor type info.

      Turns TensorInfos into Tensors in the original `fetches` structure.
      Also extracts ops from `fetches`.

      Args:
        fetch: The fetch to preprocess: Tensor, TensorInfo, or Operation, or
          string identifying a Tensor or Operation.

      Returns:
        `fetch` converted to a Tensor.
      """
      if isinstance(fetch, ops.Operation):
        operation_fetches.append(fetch)
        return fetch
      elif isinstance(fetch, meta_graph_pb2.TensorInfo):
        tensor_infos.append(fetch)
        decoded = _get_element_from_tensor_info(fetch, self._func_graph)
        if (tensor_util.is_tf_type(decoded) or
            isinstance(decoded, composite_tensor.CompositeTensor)):
          tensor_fetches.append(decoded)
        else:
          operation_fetches.append(decoded)
        return decoded
      elif isinstance(fetch,
                      (tensor_lib.Tensor, composite_tensor.CompositeTensor)):
        tensor_fetches.append(fetch)
        return fetch
      else:
        graph_element = self.graph.as_graph_element(fetch)
        return _fetch_preprocessing_callback(graph_element)

    fetches = nest.map_structure(_fetch_preprocessing_callback, fetches)

    # Expand composite tensors into their component dense Tensors.
    tensor_fetches = nest.flatten(tensor_fetches, expand_composites=True)

    for f in flat_feeds + tensor_fetches:
      if f.graph is not self._func_graph:
        raise ValueError("Can only prune function whose feeds and fetches "
                         f"are from this graph ({self._func_graph}). Input "
                         f"{f} is from a different graph {f.graph}.")

    with self._func_graph.as_default():
      pruned_graph = func_graph.FuncGraph(name)
    lift_map = lift_to_graph.lift_to_graph(
        operation_fetches + tensor_fetches,
        pruned_graph,
        sources=flat_feeds + self.graph.internal_captures,
        base_graph=self._func_graph)

    # Note that we add the component tensors of any composite tensors to the
    # returned function's outputs list; the list must contain these component
    # tensors, or the function's sparse outputs won't work properly.
    pruned_graph.outputs.extend(lift_map[x] for x in tensor_fetches)
    pruned_graph.control_outputs.extend(
        [lift_map[operation] for operation in operation_fetches])
    pruned_graph.inputs.extend(lift_map[x] for x in flat_feeds)
    for external_capture, internal_capture in self.graph.captures:
      pruned_graph.add_capture(external_capture, lift_map[internal_capture])
    for ti in tensor_infos:
      if ti.WhichOneof("encoding") == "name":  # Dense tensors only.
        t = pruned_graph.as_graph_element(ti.name)
        if tensor_util.is_tf_type(t):
          t.set_shape(tensor_shape.TensorShape(ti.tensor_shape))
    # pylint: disable=protected-access
    for f in self.graph._functions.values():
      pruned_graph._add_function(f)
    # pylint: enable=protected-access

    pruned_graph.variables = self.graph.variables

    def _structured_output_mapping(fetched):
      """callback for `nest.map_structure()`"""
      lifted = lift_map[fetched]
      if isinstance(lifted, ops.Operation):
        return None
      return lifted

    # expand_composites=True here causes composite tensors to be expanded into
    # their component dense Tensors, mapped to the new graph, and then
    # reconstituted from those components. Without this arg, the resulting
    # function would output the composite tensors from the old graph.
    fetches = nest.map_structure(_structured_output_mapping, fetches,
                                 expand_composites=True)
    pruned_graph.structured_outputs = fetches

    if input_signature:
      # Canonicalize the signature: positional args default to an empty tuple.
      args, kwargs = input_signature
      args = () if args is None else args
      input_signature = (args, kwargs)
    pruned_graph.structured_input_signature = input_signature

    pruned_fn = WrappedFunction(
        pruned_graph,
        variable_holder=self._variable_holder,
        are_keyword_args_also_positional=are_keyword_args_also_positional)
    # pylint: disable=protected-access
    pruned_fn._num_positional_args = len(flat_feeds)
    pruned_fn._arg_keywords = [t.op.name for t in flat_feeds]
    # pylint: enable=protected-access
    return pruned_fn


def _filter_returned_ops(fn):
  """Filtering out any ops returned by function.

  Args:
    fn: a function

  Returns:
    A tuple of (
      Wrapped function that returns `None` in place of any ops,
      dict that maps the index in the flat output structure to the returned op
    )
  """
  returned_ops = {}

  def wrap_and_filter_returned_ops(*args, **kwargs):
    outputs = fn(*args, **kwargs)
    flat_outputs = nest.flatten(outputs)
    for n in range(len(flat_outputs)):
      output = flat_outputs[n]
      if isinstance(output, ops.Operation):
        returned_ops[n] = output
        flat_outputs[n] = None
    return nest.pack_sequence_as(outputs, flat_outputs)

  return wrap_and_filter_returned_ops, returned_ops


class WrappedGraph(object):
  """Class for wrapping multiple TF 1.X functions in a single graph.

  Maintains a dictionary mapping names to wrapped functions. See
  `tf.compat.v1.wrap_function` to learn more about wrapping V1 functions.

  Functions wrapped using this class have access to variables and collections
  created in other wrapped functions, using the standard TF 1.X API (
  `tf.compat.v1.get_variable` or
  `tf.compat.v1.get_default_graph().get_collection(...)`)

  Outside a function, variables and collections may be accessed using the
  `variables` and `graph` properties.

  Example:

  ```
  def add_v1(x):
    with tf.compat.v1.variable_scope('vars', reuse=tf.compat.v1.AUTO_REUSE):
      v = tf.compat.v1.get_variable('v', shape=[], dtype=tf.int32)
    return v + x

  def increment_var_v1(x):
    with tf.compat.v1.variable_scope('vars', reuse=tf.compat.v1.AUTO_REUSE):
      v = tf.compat.v1.get_variable('v', shape=[], dtype=tf.int32)
    return v.assign_add(x)

  g = WrappedGraph()
  add = g.wrap_function(add_v1, [tf.TensorSpec([], tf.int32)])
  increment_var = g.wrap_function(increment_var_v1,
                                  [tf.TensorSpec([], tf.int32)])

  assert len(g.variables) == 1
  assert g.variables[0].numpy() == 0
  increment_var(tf.constant(5))
  assert g.variables[0].numpy() == 5

  ```
  """

  def __init__(self, variable_holder=None, **kwargs):
    self._variable_holder = (
        variable_holder or VariableHolder(share_variables=True))

    name = kwargs.pop("name", "wrapped_function_graph")
    # Always start with empty collections, unless otherwise specified. Setting
    # `collections=None` will copy the collections from the outer graph.
    collections = kwargs.pop("collections", {})
    self.graph = func_graph.FuncGraph(name, collections=collections, **kwargs)

    self._wrapped_function = WrappedFunction(self.graph, self._variable_holder)
    self._functions = {}

  @property
  def functions(self):
    return self._functions

  @property
  def variables(self):
    return self._variable_holder.variables

  def wrap_function(self, fn, signature, name=None):
    """Wraps a TF 1.X function and returns an eager-compatible function.

    All functions wrapped in the same `WrappedGraph` will have access to the
    same graph (`tf.compat.v1.get_default_graph` to get the graph object
    within a function, or `WrappedGraph.graph` to get the graph outside a
    function). Variables created within the function will be added to the
    `variables` list.

    Function inputs: All inputs to the function must be tensors (nested ok),
    with their shapes and dtypes defined in the `signature` argument.

    Function outputs:

      * The 1.X function may return tensors, variables, and ops. The wrapped
        eager-compatible function will always return tensors in the same nested
        structure.
      * Variables are replaced with a tensor containing the latest read values.
      * Returned ops are executed, and replaced with None.
      * The order of op execution and variable reads in the return is
        nondeterministic. For example:

        ```
        def update_var(x):
          v = tf.Variable(0)
          op = tf.compat.v1.assign(v, x).op
          return v, op

        g = WrappedGraph()
        fn = g.wrap_function(update_var)
        read_value, _ = fn(tf.constant(3))
        print(read_value.numpy())  # could be 0 or 3
        print(g.variables[0].numpy()) # always 3
        ```

    To ensure that ops in the function are executed (e.g. ops added to the
    `tf.GraphKeys.UPDATE_OPS` collection), include them in the function returns.

    Args:
      fn: a 1.X tensorflow function.
      signature: a possibly nested sequence of `TensorSpecs` specifying the
        shapes and dtypes of the arguments.
      name: an optional string name for the function. The function will be saved
        with key `name` in the `functions` dictionary.

    Returns:
      An eager-compatible function.
    """
    return self._wrap_function(fn, signature=signature, name=name)

  def _wrap_function(self,
                     fn,
                     args=None,
                     kwargs=None,
                     signature=None,
                     name=None):
    """Internal wrap function method with extended func_graph arguments."""
    fn_with_filter_and_scope, returned_ops = _filter_returned_ops(
        self._variable_holder.call_with_variable_creator_scope(fn))

    func_graph.func_graph_from_py_func(
        None,  # Name is unused.
        fn_with_filter_and_scope,
        args=args,
        kwargs=kwargs,
        signature=signature,
        add_control_dependencies=False,
        func_graph=self.graph)

    # If an existing FuncGraph is passed into the `func_graph` arg of
    # `func_graph_from_py_func`, the inputs and structured outputs are
    # overwritten, so recover the plain inputs by dropping the captures.
    fn_inputs = self.graph.inputs[:-len(self.graph.captures)]

    # Return filtered ops to the flattened outputs.
    flat_fn_outputs = nest.flatten(self.graph.structured_outputs)
    for index, op in returned_ops.items():
      flat_fn_outputs[index] = op
    fn_outputs = nest.pack_sequence_as(self.graph.structured_outputs,
                                       flat_fn_outputs)

    name = name or fn.__name__
    wrapped_function = self._wrapped_function.prune(
        fn_inputs, fn_outputs, name, self.graph.structured_input_signature)
    self._functions[name] = wrapped_function
    return wrapped_function


@tf_export(v1=["wrap_function"])
def wrap_function(fn, signature, name=None):
  """Wraps the TF 1.x function fn into a graph function.

  The python function `fn` will be called once with symbolic arguments specified
  in the `signature`, traced, and turned into a graph function. Any variables
  created by `fn` will be owned by the object returned by `wrap_function`. The
  resulting graph function can be called with tensors which match the
  signature.

  ```python
  def f(x, do_add):
    v = tf.Variable(5.0)
    if do_add:
      op = v.assign_add(x)
    else:
      op = v.assign_sub(x)
    with tf.control_dependencies([op]):
      return v.read_value()

  f_add = tf.compat.v1.wrap_function(f, [tf.TensorSpec((), tf.float32), True])

  assert float(f_add(1.0)) == 6.0
  assert float(f_add(1.0)) == 7.0

  # Can call tf.compat.v1.wrap_function again to get a new trace, a new set
  # of variables, and possibly different non-template arguments.
  f_sub= tf.compat.v1.wrap_function(f, [tf.TensorSpec((), tf.float32), False])

  assert float(f_sub(1.0)) == 4.0
  assert float(f_sub(1.0)) == 3.0
  ```

  Both `tf.compat.v1.wrap_function` and `tf.function` create a callable
  TensorFlow graph. But while `tf.function` runs all stateful operations
  (e.g. `tf.print`) and sequences operations to provide the same semantics as
  eager execution, `wrap_function` is closer to the behavior of `session.run` in
  TensorFlow 1.x. It will not run any operations unless they are required to
  compute the function's outputs, either through a data dependency or a control
  dependency. Nor will it sequence operations.
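
  For example, an illustrative sketch of that difference: an assignment with
  no data or control dependency path to the returned value is traced into the
  graph but is not executed when the wrapped function is called:

  ```python
  def double(x):
    v = tf.Variable(1.0)
    v.assign_add(x)  # No path to the output below, so this op will not run.
    return x * 2.0

  f = tf.compat.v1.wrap_function(double, [tf.TensorSpec((), tf.float32)])
  f(tf.constant(3.0))  # 6.0; the assign_add above is never executed.
  ```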

  Unlike `tf.function`, `wrap_function` will only trace the Python function
  once. As with placeholders in TF 1.x, shapes and dtypes must be provided to
  `wrap_function`'s `signature` argument.

  Since it is only traced once, variables and state may be created inside the
  function and owned by the function wrapper object.

  Args:
    fn: python function to be wrapped
    signature: the placeholder and python arguments to be passed to the wrapped
      function
    name: Optional. The name of the function.

  Returns:
    the wrapped graph function.
  """
  holder = VariableHolder(fn)
  func_graph_name = "wrapped_function"
  if name is not None:
    func_graph_name = "wrapped_function_" + name
  return WrappedFunction(
      func_graph.func_graph_from_py_func(
          func_graph_name,
          holder,
          args=None,
          kwargs=None,
          signature=signature,
          add_control_dependencies=False,
          collections={}),
      variable_holder=holder,
      signature=signature)


def function_from_graph_def(graph_def, inputs, outputs, captures=None):
  """Creates a ConcreteFunction from a GraphDef.

  Args:
    graph_def: A GraphDef to make a function out of.
    inputs: A Tensor name or nested structure of names in `graph_def` which
      should be inputs to the function.
    outputs: A Tensor name or nested structure of names in `graph_def` which
      should be outputs of the function.
    captures: (Optional) A dictionary mapping node names in `graph_def` that
      should be captured as inputs to tensors containing the value of the
      captured inputs.
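
  Example:

    A rough sketch, assuming `graph_def` contains a tensor named `"x:0"` to
    feed and a tensor named `"y:0"` to fetch (both names are hypothetical):

    ```python
    concrete_fn = function_from_graph_def(graph_def, "x:0", "y:0")
    result = concrete_fn(tf.constant(2.0))
    ```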

  Returns:
    A ConcreteFunction.
  """

  def _imports_graph_def():
    importer.import_graph_def(graph_def, name="")
    graph = ops.get_default_graph()
    if captures is not None:
      for c in captures:
        graph.add_capture(captures[c],
                          graph.get_tensor_by_name(str(c) + ":0"))

  wrapped_import = wrap_function(_imports_graph_def, [])
  import_graph = wrapped_import.graph
  return wrapped_import.prune(
      nest.map_structure(import_graph.as_graph_element, inputs),
      nest.map_structure(import_graph.as_graph_element, outputs))