
"""Functional operations."""

from tensorflow.core.framework import attr_value_pb2
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_functional_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import while_loop
from tensorflow.python.ops.gen_functional_ops import remote_call  # pylint: disable=unused-import
from tensorflow.python.ops.gen_functional_ops import symbolic_gradient
from tensorflow.python.util import deprecation
from tensorflow.python.util import dispatch
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export


@tf_export(v1=["foldl"])
@dispatch.add_dispatch_support
def foldl(fn,
          elems,
          initializer=None,
          parallel_iterations=10,
          back_prop=True,
          swap_memory=False,
          name=None):
  """foldl on the list of tensors unpacked from `elems` on dimension 0.

  This foldl operator repeatedly applies the callable `fn` to a sequence
  of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn, and the second is the value at the current
  position of `elems`. If `initializer` is None, `elems` must contain at least
  one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The signature of `fn` may
  match the structure of `elems`.  That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Args:
    fn: The callable to be performed.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      as the initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors, resulting from applying
    `fn` consecutively to the list of tensors unpacked from `elems`, from first
    to last.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = tf.constant([1, 2, 3, 4, 5, 6])
    sum = foldl(lambda a, x: a + x, elems)
    # sum == 21
    ```
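
    A further sketch (values assume the documented semantics above): an
    explicit `initializer` seeds the accumulator before the first element
    is consumed:

    ```python
    elems = tf.constant([1, 2, 3, 4, 5, 6])
    total = foldl(lambda a, x: a + x, elems, initializer=tf.constant(100))
    # total == 121
    ```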
  """
  if not callable(fn):
    raise TypeError(
        f"{fn.__name__} is not callable. Please provide a callable function.")

  def create_ta(elem):
    return tensor_array_ops.TensorArray(
        dtype=elem.dtype, size=n, dynamic_size=False,
        infer_shape=True).unstack(elem)

  in_graph_mode = not context.executing_eagerly()
  with ops.name_scope(name, "foldl", [elems]):
    if in_graph_mode:
      # Any get_variable calls in fn will cache the first call locally and not
      # issue repeated network I/O requests for each iteration.
      varscope = vs.get_variable_scope()
      varscope_caching_device_was_none = False
      if varscope.caching_device is None:
        varscope.set_caching_device(lambda op: op.device)
        varscope_caching_device_was_none = True

    # Convert elems to tensor arrays. n may be known statically.
    elems_flat = [
        ops.convert_to_tensor(elem, name="elem") for elem in nest.flatten(elems)
    ]
    n = (
        tensor_shape.dimension_value(elems_flat[0].shape[0]) or
        array_ops.shape(elems_flat[0])[0])

    elems_ta = nest.map_structure(create_ta, elems)

    if initializer is None:
      a = nest.map_structure(lambda elem: elem.read(0), elems_ta)
      i = constant_op.constant(1)
    else:
      a = initializer
      i = constant_op.constant(0)

    def compute(i, a):
      elem_i = nest.map_structure(lambda elem: elem.read(i), elems_ta)
      a = fn(a, elem_i)
      return [i + 1, a]

    _, r_a = while_loop.while_loop(
        lambda i, a: i < n,
        compute, [i, a],
        parallel_iterations=parallel_iterations,
        back_prop=back_prop,
        swap_memory=swap_memory,
        maximum_iterations=n)

    if in_graph_mode and varscope_caching_device_was_none:
      varscope.set_caching_device(None)

    return r_a
Instead of:
results = tf.foldl(fn, elems, back_prop=False)
Use:
results = tf.nest.map_structure(tf.stop_gradient, tf.foldl(fn, elems)))	warn_oncerI   c           	      &    t        | ||||||      S )a  foldl on the list of tensors unpacked from `elems` on dimension 0.

  This foldl operator repeatedly applies the callable `fn` to a sequence
  of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn, and the second is the value at the current
  position of `elems`. If `initializer` is None, `elems` must contain at least
  one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is fn(initializer, values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The signature of `fn` may
  match the structure of `elems`.  That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Args:
    fn: The callable to be performed.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      as the initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) Deprecated. False disables support for back
      propagation. Prefer using `tf.stop_gradient` instead.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors, resulting from applying
    `fn` consecutively to the list of tensors unpacked from `elems`, from first
    to last.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = tf.constant([1, 2, 3, 4, 5, 6])
    sum = tf.foldl(lambda a, x: a + x, elems)
    # sum == 21
    ```
  rC   rZ   r[   rH   rI   rJ   r4   )r   rd   s          r'   foldl_v2re      '    F 
	-
 r*   foldrc           
      @    t               st         j                   d      fd}t        j                          }t        j                  |d|g      5  |r6t        j                         }	d}
|	j                  |	j                  d        d}
t        j                  |      D cg c]  }t        j                  |d	       }}t        j                  |d
   j                   d
         xs t#        j                   |d
         d
   t        j$                  ||      |dz
  t        j$                  fd      }n|} fd}t'        j&                  d ||g|||      \  }}|r
r	j                  d       |cddd       S c c}w # 1 sw Y   yxY w)aj  foldr on the list of tensors unpacked from `elems` on dimension 0.

  This foldr operator repeatedly applies the callable `fn` to a sequence
  of elements from last to first. The elements are made of the tensors
  unpacked from `elems`. The callable fn takes two tensors as arguments.
  The first argument is the accumulated value computed from the preceding
  invocation of fn, and the second is the value at the current position of
  `elems`. If `initializer` is None, `elems` must contain at least one element,
  and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The signature of `fn` may
  match the structure of `elems`.  That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Args:
    fn: The callable to be performed.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      as the initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors, resulting from applying
    `fn` consecutively to the list of tensors unpacked from `elems`, from last
    to first.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = foldr(lambda a, x: a + x, elems)
    # sum == 21
    ```
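
    Because `foldr` consumes `elems` from last to first, order-sensitive
    functions behave differently than with `foldl`. A further sketch, under
    the same schematic conventions as the example above:

    ```python
    elems = [1, 2, 3, 4, 5, 6]
    alternating = foldr(lambda a, x: x - a, elems, initializer=0)
    # alternating == -3, i.e. 1 - 2 + 3 - 4 + 5 - 6
    ```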
  r   c                 h    t        j                  | j                  dd      j                  |       S r   r!   r$   s    r'   r(   zfoldr.<locals>.create_ta+  r)   r*   rg   FNc                     | j                   S r,   r-   r/   s    r'   r1   zfoldr.<locals>.<lambda><  r2   r*   Tr%   r3   r   r9   c                 &    | j                        S r,   r7   r<   s    r'   r1   zfoldr.<locals>.<lambda>K  s    $))A, r*   c                 \      dz   t        j                   fd      } ||      } |gS )Nr9   c                 &    | j                        S r,   r7   r<   s    r'   r1   z(foldr.<locals>.compute.<locals>.<lambda>R  s    TYYq\ r*   r>   )r=   r@   r%   a_outrB   rC   s   `   r'   rD   zfoldr.<locals>.computeP  s4    1fa 98DdDkeZr*   c                     | dkD  S r6   rF   )r=   r@   s     r'   r1   zfoldr.<locals>.<lambda>W  s
    QU r*   rG   )rL   rM   rN   r   rO   r   rP   rQ   rR   rS   rT   r   rU   rV   r
   rW   rX   r   r?   r   )rC   rZ   r[   rH   rI   rJ   r4   r(   r\   r]   r^   r%   r_   r@   rD   r`   ra   rB   r=   r&   s   `                @@@r'   rg   rg      s   r 
"
;;-LMO O(
 //11-
~~dGeW- 2  &&(h).&		 	 	( 	##$89+/( >B\\%=P59d0J  	$$Z]%8%8%;< 	*
1&q)  !!)U3H
a%a


6
Aa
a
a ""!Q/FAs 9!!$'e2 22 2s    AF*FB=FFFzback_prop=False is deprecated. Consider using tf.stop_gradient instead.
Instead of:
results = tf.foldr(fn, elems, back_prop=False)
Use:
results = tf.nest.map_structure(tf.stop_gradient, tf.foldr(fn, elems))c           	      &    t        | ||||||      S )a  foldr on the list of tensors unpacked from `elems` on dimension 0.

  This foldr operator repeatedly applies the callable `fn` to a sequence
  of elements from last to first. The elements are made of the tensors
  unpacked from `elems`. The callable fn takes two tensors as arguments.
  The first argument is the accumulated value computed from the preceding
  invocation of fn, and the second is the value at the current position of
  `elems`. If `initializer` is None, `elems` must contain at least one element,
  and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.

  This method also allows multi-arity `elems` and output of `fn`.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The signature of `fn` may
  match the structure of `elems`.  That is, if `elems` is
  `(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
  `fn = lambda (t1, [t2, t3, [t4, t5]]):`.

  Args:
    fn: The callable to be performed.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      as the initial value for the accumulator.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) Deprecated. False disables support for back
      propagation. Prefer using `tf.stop_gradient` instead.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors, resulting from applying
    `fn` consecutively to the list of tensors unpacked from `elems`, from last
    to first.

  Raises:
    TypeError: if `fn` is not callable.

  Example:
    ```python
    elems = [1, 2, 3, 4, 5, 6]
    sum = tf.foldr(lambda a, x: a + x, elems)
    # sum == 21
    ```
  rd   )rg   rd   s          r'   foldr_v2rq   f  rf   r*   scanc	                 p    !"#$% t               st         j                   d      t        j                          fd}	 fd! $|	#!%n t        j                        $$fd#$fd% |	      }
t        j                          }t        j                  |d|
      5  |r6t        j                         }d}|j                  |j                  d	        d
}|
D cg c]  }t        j                  |d       }
}t        j                  |
d   j                   d         ""t#        j                   |
d         d   "|
D cg c]3  }t%        j&                  |j(                  "d|j                   dd d
      5 c}t+        |
      D cg c]  \  }}|j-                  |       c}}(D cg c]  }|j/                  r"dz
  nd       }}d}n, #      }|D cg c]  }t        j                  |       }}d}|D cg c]4  }t%        j&                  |j(                  "|r|j                   ndd|      6 }}5t+        ||      D cg c]  \  }}|j1                  r"dz
  nd|        }}} !#%fd}r"dz
  |z
  }d }n|}"fd}t3        j2                  |||||f|||"      \  }}}|D cg c]  }|j5                          }}t        j6                  t        j                  |
d   j9                         j;                  d      d               }|
dd D ]Y  }|j=                  t        j6                  t        j                  |j9                         j;                  d      d                      [ |D ]F  }|j?                  t        j@                  |      jC                  |j9                         dd              H |rrj                  d        %|      cddd       S c c}w c c}w c c}}w c c}w c c}w c c}w c c}}w c c}w # 1 sw Y   yxY w)a  scan on the list of tensors unpacked from `elems` on dimension 0.

  See also `tf.map_fn`.

  The simplest version of `scan` repeatedly applies the callable `fn` to a
  sequence of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn, and the second is the value at the current
  position of `elems`. If `initializer` is None, `elems` must contain at least
  one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `[len(values)] + fn(initializer, values[0]).shape`.
  If reverse=True, it's fn(initializer, values[-1]).shape.

  This method also allows multi-arity `elems` and accumulator.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The second argument of
  `fn` must match the structure of `elems`.

  If no `initializer` is provided, the output structure and dtypes of `fn`
  are assumed to be the same as its input; and in this case, the first
  argument of `fn` must match the structure of `elems`.

  If an `initializer` is provided, then the output of `fn` must have the same
  structure as `initializer`; and the first argument of `fn` must match
  this structure.

  For example, if `elems` is `(t1, [t2, t3])` and `initializer` is
  `[i1, i2]` then an appropriate signature for `fn` in `python2` is:
  `fn = lambda (acc_p1, acc_p2), (t1, [t2, t3]):` and `fn` must return a list,
  `[acc_n1, acc_n2]`.  An alternative correct signature for `fn`, and the
   one that works in `python3`, is:
  `fn = lambda a, t:`, where `a` and `t` correspond to the input tuples.

  Args:
    fn: The callable to be performed.  It accepts two arguments.  The first will
      have the same structure as `initializer` if one is provided, otherwise it
      will have the same structure as `elems`.  The second will have the same
      (possibly nested) structure as `elems`.  Its output must have the same
      structure as `initializer` if one is provided, otherwise it must have the
      same structure as `elems`.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      initial value for the accumulator, and the expected output type of `fn`.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) True enables support for back propagation.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    infer_shape: (optional) False disables tests for consistent output shapes.
    reverse: (optional) True scans the tensor last to first (instead of first to
      last).
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors.  Each tensor packs the
    results of applying `fn` to tensors unpacked from `elems` along the first
    dimension, and the previous accumulator value(s), from first to last (or
    last to first, if `reverse=True`).

  Raises:
    TypeError: if `fn` is not callable or the structure of the output of
      `fn` and `initializer` do not match.
    ValueError: if the lengths of the output of `fn` and `initializer`
      do not match.

  Examples:
    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    sum = scan(lambda a, x: a + x, elems)
    # sum == [1, 3, 6, 10, 15, 21]
    sum = scan(lambda a, x: a + x, elems, reverse=True)
    # sum == [21, 20, 18, 15, 11, 6]
    ```

    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    initializer = np.array(0)
    sum_one = scan(
        lambda a, x: x[0] - x[1] + a, (elems + 1, elems), initializer)
    # sum_one == [1, 2, 3, 4, 5, 6]
    ```

    ```python
    elems = np.array([1, 0, 0, 0, 0, 0])
    initializer = (np.array(0), np.array(1))
    fibonaccis = scan(lambda a, _: (a[1], a[0] + a[1]), elems, initializer)
    # fibonaccis == ([1, 1, 2, 3, 5, 8], [1, 2, 3, 5, 8, 13])
    ```
  r   c                 8    rt        j                  |       S | gS r,   r   rU   )xinput_is_sequences    r'   r1   zscan.<locals>.<lambda>   s    /@DLLO qc r*   c                 >    rt        j                  |       S | d   S r6   r   pack_sequence_as)rv   rZ   rw   s    r'   
input_packzscan.<locals>.input_pack"  s!    .?4  *IQqTIr*   Nc                 8    rt        j                  |       S | gS r,   ru   )rv   output_is_sequences    r'   r1   zscan.<locals>.<lambda>+  s    2Dt||A 1# r*   c                 >    rt        j                  |       S | d   S r6   ry   )rv   r[   r}   s    r'   output_packzscan.<locals>.output_pack-  s)    # ##K3 /)*1/r*   rr   Fc                     | j                   S r,   r-   r/   s    r'   r1   zscan.<locals>.<lambda>?  r2   r*   Tr%   r3   r   r9   )r   r   r   element_shaper    )r   r   r   r   r    c                 J    D cg c]  }|j                  |        c}      } |      } ||      }t        j                  n|        |      }t        ||      D 	cg c]  \  }}	|j	                  | |	       }}}	r| dz
  }
n| dz   }
|
||fS c c}w c c}	}w )a  The loop body of scan.

      Args:
        i: the loop counter.
        a_flat: the accumulator value(s), flattened.
        tas: the output accumulator TensorArray(s), flattened.

      Returns:
        [i + 1, a_flat, tas]: the updated counter + new accumulator values +
          updated TensorArrays

      Raises:
        TypeError: if initializer and fn() output structure do not match
        ValueType: if initializer and fn() output lengths do not match
      r9   )r8   r   assert_same_structurezipwrite)r=   a_flattaselem_tapacked_elemspacked_arn   
flat_a_outtavaluenext_irZ   rB   rC   r[   r{   output_flattenr   reverses              r'   rD   zscan.<locals>.computer  s        IWa IJlV$h<(e
  +*=;!&(!%(j36sJ3GHKRRXXaHcH	QQj#&& !J Is   B)Bc                     | dk\  S r6   rF   )r=   _1_2s      r'   r1   zscan.<locals>.<lambda>  s
    AF r*   c                     | k  S r,   rF   )r=   r   r   r&   s      r'   r1   zscan.<locals>.<lambda>  s    AE r*   rG   )"rL   rM   rN   r   	is_nestedr   rO   r   rP   rQ   rR   rS   rT   rV   r
   rW   rX   r   r   r"   r   r   r#   r8   r   r   stack	Dimension	get_shapewith_rank_at_leastassert_is_compatible_with	set_shapeTensorShapeconcatenate)&rC   rZ   r[   rH   rI   rJ   r    r   r4   input_flattenr_   r\   r]   r^   r%   r   r   r=   initializer_flatinitaccs_taacc_tar@   rD   	initial_i	conditionr`   ra   rresults_flatn_staticrB   rw   r{   r&   r   r}   r   s&   ```    `                       @@@@@@@r'   rr   rr     sg   P 
"
;;-LMO O nnU+I-J *"NK4MN/ U#*//11-
~~dFJ/ |%  &&(h).&		 	 	( 	##$89+/( >H59d0J 
 	$$Z]%8%8%;<Ay
//*Q-
(
+a +5 #' 	$$****QR.	H 47x3L"/'4H ?GHt		7!a%2HfH
a'48HI%%d+IfI
a 28 *. 	$$**(3$**#	%G   !&1vq ,,q1uQ
2g 
' ': a%!)i*ii)i%%)VW-/IAq# (++!AGGI+L+%%$$qM##%88;A>	@AH 12 ?((

 
 **.."55a8;=>??
  Mkk

"
"8
,
8
8qr9J
KMM 9!!$'|$y|% |%
 I J` ,S|% |%su   ,=P,)PAP,8PP,P4
P,>PP,.P
P,9PP,#P!AP,P'#DP,*P,,P5zback_prop=False is deprecated. Consider using tf.stop_gradient instead.
Instead of:
results = tf.scan(fn, elems, back_prop=False)
Use:
results = tf.nest.map_structure(tf.stop_gradient, tf.scan(fn, elems))c	                 *    t        | ||||||||	      S )a2  scan on the list of tensors unpacked from `elems` on dimension 0.

  The simplest version of `scan` repeatedly applies the callable `fn` to a
  sequence of elements from first to last. The elements are made of the tensors
  unpacked from `elems` on dimension 0. The callable fn takes two tensors as
  arguments. The first argument is the accumulated value computed from the
  preceding invocation of fn, and the second is the value at the current
  position of `elems`. If `initializer` is None, `elems` must contain at least
  one element, and its first element is used as the initializer.

  Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `[len(values)] + fn(initializer, values[0]).shape`.
  If reverse=True, it's fn(initializer, values[-1]).shape.

  This method also allows multi-arity `elems` and accumulator.  If `elems`
  is a (possibly nested) list or tuple of tensors, then each of these tensors
  must have a matching first (unpack) dimension.  The second argument of
  `fn` must match the structure of `elems`.

  If no `initializer` is provided, the output structure and dtypes of `fn`
  are assumed to be the same as its input; and in this case, the first
  argument of `fn` must match the structure of `elems`.

  If an `initializer` is provided, then the output of `fn` must have the same
  structure as `initializer`; and the first argument of `fn` must match
  this structure.

  For example, if `elems` is `(t1, [t2, t3])` and `initializer` is
  `[i1, i2]` then an appropriate signature for `fn` in `python2` is:
  `fn = lambda (acc_p1, acc_p2), (t1, [t2, t3]):` and `fn` must return a list,
  `[acc_n1, acc_n2]`.  An alternative correct signature for `fn`, and the
   one that works in `python3`, is:
  `fn = lambda a, t:`, where `a` and `t` correspond to the input tuples.

  Args:
    fn: The callable to be performed.  It accepts two arguments.  The first will
      have the same structure as `initializer` if one is provided, otherwise it
      will have the same structure as `elems`.  The second will have the same
      (possibly nested) structure as `elems`.  Its output must have the same
      structure as `initializer` if one is provided, otherwise it must have the
      same structure as `elems`.
    elems: A tensor or (possibly nested) sequence of tensors, each of which will
      be unpacked along their first dimension.  The nested sequence of the
      resulting slices will be the first argument to `fn`.
    initializer: (optional) A tensor or (possibly nested) sequence of tensors,
      initial value for the accumulator, and the expected output type of `fn`.
    parallel_iterations: (optional) The number of iterations allowed to run in
      parallel.
    back_prop: (optional) Deprecated. False disables support for back
      propagation. Prefer using `tf.stop_gradient` instead.
    swap_memory: (optional) True enables GPU-CPU memory swapping.
    infer_shape: (optional) False disables tests for consistent output shapes.
    reverse: (optional) True scans the tensor last to first (instead of first to
      last).
    name: (optional) Name prefix for the returned tensors.

  Returns:
    A tensor or (possibly nested) sequence of tensors.  Each tensor packs the
    results of applying `fn` to tensors unpacked from `elems` along the first
    dimension, and the previous accumulator value(s), from first to last (or
    last to first, if `reverse=True`).

  Raises:
    TypeError: if `fn` is not callable or the structure of the output of
      `fn` and `initializer` do not match.
    ValueError: if the lengths of the output of `fn` and `initializer`
      do not match.

  Examples:
    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    sum = scan(lambda a, x: a + x, elems)
    # sum == [1, 3, 6, 10, 15, 21]
    sum = scan(lambda a, x: a + x, elems, reverse=True)
    # sum == [21, 20, 18, 15, 11, 6]
    ```

    ```python
    elems = np.array([1, 2, 3, 4, 5, 6])
    initializer = np.array(0)
    sum_one = scan(
        lambda a, x: x[0] - x[1] + a, (elems + 1, elems), initializer)
    # sum_one == [1, 2, 3, 4, 5, 6]
    ```

    ```python
    elems = np.array([1, 0, 0, 0, 0, 0])
    initializer = (np.array(0), np.array(1))
    fibonaccis = scan(lambda a, _: (a[1], a[0] + a[1]), elems, initializer)
    # fibonaccis == ([1, 1, 2, 3, 5, 8], [1, 2, 3, 5, 8, 13])
    ```
  	rC   rZ   r[   rH   rI   rJ   r    r   r4   )rr   r   s	            r'   scan_v2r     s-    ` 
	-	
 	r*   c                    t        |t        j                        rR|j                  j                  j
                  D cg c]  }|j                   }}t        j                  | |||||      S |j                  }|j                  }t        j                  ||d       t        j                  |j                        }t        j                  | |||||      }	t        j                  ||	d      S c c}w )aG  output = Cond(inputs) ?

  then_branch(inputs) : else_branch(inputs).

  Args:
    cond: A `Tensor`. A scalar. If the scalar is not a boolean, the scalar is
      converted to a boolean according to the following rule: if the scalar is a
        numerical value, non-zero means True and zero means False; if the scalar
        is a string, non-empty means True and empty means False.
    inputs: A list of input tensors.
    then_branch: A function takes 'inputs' and returns a list of tensors, whose
      types are the same as what else_branch returns.
    else_branch: A function takes 'inputs' and returns a list of tensors, whose
      types are the same as what then_branch returns.
    name: A name for the operation (optional).

  Returns:
    A list of tensors returned by either then_branch(inputs)
    or else_branch(inputs).
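
  Example:
    A minimal graph-mode sketch (illustrative; it assumes `function.Defun`
    branches with matching output types, which is what this op requires):

    ```python
    @function.Defun(dtypes.float32)
    def TwoTimes(x):
      return x * 2.0

    @function.Defun(dtypes.float32)
    def ThreeTimes(x):
      return x * 3.0

    with ops.Graph().as_default():
      out = If(constant_op.constant(True),
               [constant_op.constant(10.0)], TwoTimes, ThreeTimes)
      # out[0] evaluates to 20.0 when run in a tf.compat.v1.Session.
    ```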
  """
  if isinstance(then_branch, function._DefinedFunction):
    tlist = [_.type for _ in then_branch.definition.signature.output_arg]
    return gen_functional_ops._if(
        cond, inputs, tlist, then_branch, else_branch, name=name)

  # We assume that `then_branch` is a ConcreteFunction here.
  then_out = then_branch.structured_outputs
  else_out = else_branch.structured_outputs

  # Ensure then/else are the same type of composites to avoid an invalid call
  # to pack_sequence_as later on.
  nest.assert_same_structure(then_out, else_out, expand_composites=True)

  tlist = nest.flatten(then_branch.output_dtypes)
  ret = gen_functional_ops._if(
      cond, inputs, tlist, then_branch, else_branch, name=name)

  # Re-pack the flat output list into the structure of the branch outputs.
  return nest.pack_sequence_as(then_out, ret, expand_composites=True)


def Gradient(inputs, f, name=None):
  """Computes the gradient function for function f via backpropagation.

  Args:
    inputs: A list of tensors of size N + M.
    f: The function we want to compute the gradient for.  The function 'f' must
      be a numerical function which takes N inputs and produces M outputs. Its
      gradient function 'g' is a function that takes N + M inputs and
      produces N outputs.  I.e. if we have (y1, y2, ..., yM) = f(x1, x2, ...,
      xN), then, g is (dL/dx1, dL/dx2, ..., dL/dxN) = g(x1, x2, ..., xN, dL/dy1,
      dL/dy2, ..., dL/dyM),  where L is a scalar-value function of (x1, x2, ...,
      xN) (e.g., the loss function). dL/dxi is the partial derivative of L with
      respect to xi.
    name: A name for the operation (optional).

  Returns:
    A list of tensors of size N.
  """
  tlist = [_.type for _ in f.definition.signature.input_arg]
  return symbolic_gradient(input=inputs, Tout=tlist, f=f, name=name)


def _GetInputDtypes(func):
  """Returns the input dtypes of func, excluding dtypes for captured inputs."""
  if isinstance(func, function._DefinedFunction):
    return func.declared_input_types

  # We assume that `func` is a ConcreteFunction here, but we are not able to
  # verify since importing eager function library will cause cyclic
  # dependencies.
  num_non_captured_inputs = len(func.inputs) - len(func.captured_inputs)
  inputs_without_captured = func.inputs[:num_non_captured_inputs]
  return [t.dtype for t in inputs_without_captured]


def _LoopBodyCaptureWrapper(func):
  """Returns a wrapper for `func` that handles loop-carried captured inputs."""

  @function.Defun(*_GetInputDtypes(func), func_name="%s_Wrapper" % func.name)
  def Wrapper(*args):
    """A wrapper that handles loop-carried captured inputs."""
    result = func(*args)
    extra_args = tuple(function.get_extra_args())

    # Nullary functions return an Operation. Normal functions can't do this
    # because their return values are converted to Tensors.
    if isinstance(result, ops.Operation):
      return extra_args
    # Unary functions return a single Tensor value.
    elif not isinstance(result, (list, tuple)):
      return (result,) + extra_args
    # N-ary functions return a tuple of Tensors.
    else:
      return result + type(result)(extra_args)

  return Wrapper


def While(input_, cond, body, name=None, hostmem=None):
  """output = input; While (Cond(output)) { output = Body(output) }.

  Args:
    input_: A list of `Tensor` objects. A list of input tensors whose types are
      T.
    cond: A function takes 'input' and returns a tensor.  If the tensor is a
      scalar of non-boolean, the scalar is converted to a boolean
      according to the following rule: if the scalar is a numerical value,
        non-zero means True and zero means False; if the scalar is a string,
        non-empty means True and empty means False. If the tensor is not a
        scalar, non-emptiness means True and False otherwise.
    body: A function takes a list of tensors and returns another list of
      tensors.
      Both lists have the same types as specified by T.
    name: A name for the operation (optional).
    hostmem: A list of integer. If i is in the list, input[i] is a host memory
      tensor.

  Raises:
    ValueError: if `cond` has implicitly captured inputs or if `cond` and `body`
      have different signatures.

  Returns:
    A list of `Tensor` objects. Has the same type as `input`.
    A list of output tensors whose types are T.
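
  Example:
    A minimal sketch (illustrative; it assumes `function.Defun` signatures,
    as required for the 'cond' and 'body' arguments of this op):

    ```python
    @function.Defun(dtypes.int32)
    def Cond(x):
      return x < 10

    @function.Defun(dtypes.int32)
    def Body(x):
      return x + 1

    with ops.Graph().as_default():
      out = While([constant_op.constant(0)], Cond, Body)
      # out[0] evaluates to 10 when run in a tf.compat.v1.Session.
    ```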
  """
  if cond.captured_inputs:
    raise ValueError(
        "The 'cond' argument can not have implicitly captured inputs. Received "
        f"captured_inputs: {cond.captured_inputs}")

  cond_input_types = _GetInputDtypes(cond)
  body_input_types = _GetInputDtypes(body)

  if cond_input_types != body_input_types:
    raise ValueError(
        "The 'cond' and 'body' signatures do not match. Received: "
        f"cond_input_types={cond_input_types}, body_input_types="
        f"{body_input_types}")

  if body.captured_inputs:
    cond_dtypes = list(body_input_types) + [
        t.dtype for t in body.captured_inputs
    ]

    @function.Defun(*cond_dtypes, func_name="%s_Wrapper" % cond.name)
    def CondWrapper(*args):
      """A wrapper that handles loop-carried captured inputs."""
      # Drop the loop-carried captured inputs before calling cond.
      return cond(*args[:len(body_input_types)])

    ret = gen_functional_ops._while(
        input_ + body.captured_inputs,
        CondWrapper,
        _LoopBodyCaptureWrapper(body),
        name=name)
    # Slice off the loop-carried captured inputs.
    ret = ret[:-len(body.captured_inputs)]
  else:
    ret = gen_functional_ops._while(input_, cond, body, name=name)

  if hostmem:
    input_attr = attr_value_pb2.AttrValue()
    input_attr.list.i.extend(hostmem)
    ret[0].op._set_attr("_input_hostmem", input_attr)  # pylint: disable=protected-access

    output_attr = attr_value_pb2.AttrValue()
    output_attr.list.i.extend(hostmem)
    ret[0].op._set_attr("_output_hostmem", output_attr)  # pylint: disable=protected-access
  return ret


def _ForUsingWhile(start,
                   limit,
                   delta,
                   inputs,
                   forbody,
                   name=None,
                   hostmem=None):
  """Helper to implement a For loop using a While."""
  # To support negative delta (e.g., range(100, 0, -3)), we iterate
  # over the range(n) and use iter * delta + start as the real
  # iteration index. (e.g., for i in range(34): iter = i * (-3) +
  # 100).
  d = math_ops.abs(delta)
  # XLA on TPUs doesn't support integer division.
  n = math_ops.cast(
      math_ops.cast((math_ops.abs(limit - start) + d - 1), dtypes.float32) /
      math_ops.cast(d, dtypes.float32), dtypes.int32)

  # Carried loop variables ("extra_args") are implicitly added to the input
  # list of the WhileBody function. WhileCond does not call forbody, and so
  # does not depend on any of forbody's extra_args. Since WhileCond and
  # WhileBody must have identical inputs, we have to augment the cond
  # signature to take the same types as the carried loop variables.
  body_sig = [dtypes.int32] * 4 + list(forbody.declared_input_types)[1:]

  cond_name = "%s_Cond" % forbody.name

  @function.Defun(*body_sig, func_name=cond_name)
  def WhileCond(i, n, *args):
    del args
    return i < n

  body_name = "%s_Body" % forbody.name

  @function.Defun(*body_sig, func_name=body_name)
  def WhileBody(i, n, start, delta, *args):
    """A While wrapper for forbody that handles loop-carried captured inputs."""
    for_result = forbody(start + i * delta, *args)
    # Nullary functions return an Operation. Normal functions can't do this
    # because their return values are converted to Tensors.
    if isinstance(for_result, ops.Operation):
      for_result = ()
    # Unary functions return a single Tensor value.
    elif isinstance(for_result, tensor.Tensor):
      for_result = (for_result,)
    return (i + 1, n, start, delta) + tuple(for_result)

  if hostmem is not None:
    hostmem = [0, 1, 2, 3] + [(4 + _) for _ in hostmem]
  else:
    hostmem = [0, 1, 2, 3]

  results = While(
      input_=[0, n, start, delta] + inputs,
      cond=WhileCond,
      body=WhileBody,
      name=name,
      hostmem=hostmem)
  # Slice off the counter/limit/start/delta loop state.
  return list(results[4:len(results)])


def For(start,
        limit,
        delta,
        inputs,
        body,
        name=None,
        hostmem=None,
        rewrite_with_while=None):
  """out = input; for i in range(start, limit, delta) out = body(i, out).

  Args:
    start: A `Tensor` of type `int32`.
    limit: A `Tensor` of type `int32`.
    delta: A `Tensor` of type `int32`.
    inputs: A list of `Tensor` objects. A list of input tensors whose types are
      T.
    body: A function takes a list of tensors and returns another list of
      tensors. Both lists have the same types as (int32, T...).
    name: A name for the operation (optional).
    hostmem: A list of integer. If i is in the list, inputs[i] is a host memory
      tensor. In other words, (i+1)-th argument of the body function is
      expecting a host memory.
    rewrite_with_while: If True, using While op to implement the For.

  Returns:
    A list of `Tensor` objects. Has the same type as `input`.
    A list of output tensors whose types are T.
  """
  if rewrite_with_while:
    return _ForUsingWhile(start, limit, delta, inputs, body, name, hostmem)
  if body.captured_inputs:
    ret = gen_functional_ops._for(
        start,
        limit,
        delta,
        inputs + body.captured_inputs,
        _LoopBodyCaptureWrapper(body),
        name=name)
    # Slice off the loop-carried captured inputs.
    ret = ret[:-len(body.captured_inputs)]
  else:
    ret = gen_functional_ops._for(start, limit, delta, inputs, body, name=name)
  if hostmem:
    num_for_params = 3  # start/limit/delta

    input_attr = attr_value_pb2.AttrValue()
    input_attr.list.i.extend([num_for_params + i for i in hostmem])
    ret[0].op._set_attr("_input_hostmem", input_attr)  # pylint: disable=protected-access

    output_attr = attr_value_pb2.AttrValue()
    output_attr.list.i.extend(hostmem)
    ret[0].op._set_attr("_output_hostmem", output_attr)  # pylint: disable=protected-access
  return ret