
"""Jacobian ops."""

from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import gradients_impl as gradient_ops
from tensorflow.python.ops.parallel_for import control_flow_ops
from tensorflow.python.util import nest


def jacobian(output, inputs, use_pfor=True, parallel_iterations=None):
  """Computes jacobian of `output` w.r.t. `inputs`.

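  e.g. (an illustrative sketch; here dy[a]/dx[i, k] = 2 * x[i, k] if i == a,
  else 0):
  x = tf.constant([[1., 2.], [3., 4.]], dtype=tf.float32)
  y = tf.reduce_sum(x * x, axis=1)
  jacobian(y, x)
  # => [[[2., 4.], [0., 0.]], [[0., 0.], [6., 8.]]]
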
  Args:
    output: A tensor.
    inputs: A tensor or a nested structure of tensor objects.
    use_pfor: If true, uses pfor for computing the jacobian. Else uses
      tf.while_loop.
    parallel_iterations: A knob to control how many iterations are dispatched in
      parallel. This knob can be used to control the total memory usage.

  Returns:
    A tensor or a nested structure of tensors with the same structure as
    `inputs`. Each entry is the jacobian of `output` w.r.t. the corresponding
    value in `inputs`. If output has shape [y_1, ..., y_n] and inputs_i has
    shape [x_1, ..., x_m], the corresponding jacobian has shape
    [y_1, ..., y_n, x_1, ..., x_m]. Note that in cases where the gradient is
    sparse (IndexedSlices), the jacobian function currently makes it dense and
    returns a Tensor instead. This may change in the future.
  """
  flat_inputs = nest.flatten(inputs)
  output_tensor_shape = output.shape
  output_shape = array_ops.shape(output)
  # Flatten `output` so that loop_fn can index individual scalar elements.
  output = array_ops.reshape(output, [-1])

  def loop_fn(i):
    y = array_ops.gather(output, i)
    return gradient_ops.gradients(y, flat_inputs)

  try:
    output_size = int(output.shape[0])
  except TypeError:
    output_size = array_ops.shape(output)[0]

  if use_pfor:
    pfor_outputs = control_flow_ops.pfor(
        loop_fn, output_size, parallel_iterations=parallel_iterations)
  else:
    pfor_outputs = control_flow_ops.for_loop(
        loop_fn,
        [output.dtype] * len(flat_inputs),
        output_size,
        parallel_iterations=parallel_iterations)

  for i, out in enumerate(pfor_outputs):
    if isinstance(out, tensor.Tensor):
      # Prepend the original output dimensions to the per-input dimensions.
      new_shape = array_ops.concat(
          [output_shape, array_ops.shape(out)[1:]], axis=0)
      out = array_ops.reshape(out, new_shape)
      out.set_shape(output_tensor_shape.concatenate(flat_inputs[i].shape))
      pfor_outputs[i] = out

  return nest.pack_sequence_as(inputs, pfor_outputs)


def batch_jacobian(output, inp, use_pfor=True, parallel_iterations=None):
  """Computes and stacks jacobians of `output[i,...]` w.r.t. `input[i,...]`.

  e.g.
  x = tf.constant([[1, 2], [3, 4]], dtype=tf.float32)
  y = x * x
  jacobian = batch_jacobian(y, x)
  # => [[[2,  0], [0,  4]], [[6,  0], [0,  8]]]

  Args:
    output: A tensor with shape [b, y1, ..., y_n]. `output[i,...]` should
      only depend on `inp[i,...]`.
    inp: A tensor with shape [b, x1, ..., x_m]
    use_pfor: If true, uses pfor for computing the Jacobian. Else uses a
      tf.while_loop.
    parallel_iterations: A knob to control how many iterations are vectorized
      and dispatched in parallel. The default value of None, when use_pfor is
      true, corresponds to vectorizing all the iterations. When use_pfor is
      false, the default value of None corresponds to parallel_iterations=10.
      This knob can be used to control the total memory usage.

  Returns:
    A tensor `t` with shape [b, y_1, ..., y_n, x1, ..., x_m] where `t[i, ...]`
    is the jacobian of `output[i, ...]` w.r.t. `inp[i, ...]`, i.e. stacked
    per-example jacobians.
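    In the example above, the result has shape [2, 2, 2]: one 2x2 jacobian per
    batch element.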

  Raises:
    ValueError: if first dimension of `output` and `inp` do not match.
  """
  output_shape = output.shape
  if not output_shape[0].is_compatible_with(inp.shape[0]):
    raise ValueError(f"Need first dimension of `output` shape ({output.shape}) "
                     f"and `inp` shape ({inp.shape}) to match.")
  if output_shape.is_fully_defined():
    batch_size = int(output_shape[0])
    output_row_size = output_shape.num_elements() // batch_size
  else:
    output_shape = array_ops.shape(output)
    batch_size = output_shape[0]
    output_row_size = array_ops.size(output) // batch_size
  inp_shape = array_ops.shape(inp)
  # Flatten output to 2-D: [batch_size, output_row_size].
  with ops.control_dependencies(
      [check_ops.assert_equal(batch_size, inp_shape[0])]):
    output = array_ops.reshape(output, [batch_size, output_row_size])

  def loop_fn(i):
    y = array_ops.gather(output, i, axis=1)
    return gradient_ops.gradients(y, inp)[0]

  if use_pfor:
    pfor_output = control_flow_ops.pfor(
        loop_fn, output_row_size, parallel_iterations=parallel_iterations)
  else:
    pfor_output = control_flow_ops.for_loop(
        loop_fn, output.dtype, output_row_size,
        parallel_iterations=parallel_iterations)
  if pfor_output is None:
    return None
  pfor_output = array_ops.reshape(
      pfor_output, [output_row_size, batch_size, -1])
  # Move the batch dimension to the front: [batch_size, output_row_size, -1].
  output = array_ops.transpose(pfor_output, [1, 0, 2])
  new_shape = array_ops.concat([output_shape, inp_shape[1:]], axis=0)
  return array_ops.reshape(output, new_shape)