
"""The Multinomial distribution class."""

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export


__all__ = [
    "Multinomial",
]


_multinomial_sample_note = """For each batch of counts, `value = [n_0, ...
,n_{k-1}]`, `P[value]` is the probability that after sampling `self.total_count`
draws from this Multinomial distribution, the number of draws falling in class
`j` is `n_j`. Since this definition is [exchangeable](
https://en.wikipedia.org/wiki/Exchangeable_random_variables), different
sequences of draws that yield the same counts have the same probability, so
the probability includes a combinatorial coefficient.

Note: `value` must be a non-negative tensor with dtype `self.dtype`, have no
fractional components, and such that
`tf.reduce_sum(value, -1) = self.total_count`. Its shape must be broadcastable
with `self.probs` and `self.total_count`."""


@tf_export(v1=["distributions.Multinomial"])
class Multinomial(distribution.Distribution):
  """Multinomial distribution.

  This Multinomial distribution is parameterized by `probs`, a (batch of)
  length-`K` `prob` (probability) vectors (`K > 1`) such that
  `tf.reduce_sum(probs, -1) = 1`, and a `total_count` number of trials, i.e.,
  the number of trials per draw from the Multinomial. It is defined over a
  (batch of) length-`K` vector `counts` such that
  `tf.reduce_sum(counts, -1) = total_count`. The Multinomial is identically the
  Binomial distribution when `K = 2`.

  #### Mathematical Details

  The Multinomial is a distribution over `K`-class counts, i.e., a length-`K`
  vector of non-negative integer `counts = n = [n_0, ..., n_{K-1}]`.

  The probability mass function (pmf) is,

  ```none
  pmf(n; pi, N) = prod_j (pi_j)**n_j / Z
  Z = (prod_j n_j!) / N!
  ```

  where:
  * `probs = pi = [pi_0, ..., pi_{K-1}]`, `pi_j > 0`, `sum_j pi_j = 1`,
  * `total_count = N`, `N` a positive integer,
  * `Z` is the normalization constant, and,
  * `N!` denotes `N` factorial.
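
  As a quick worked check of the formula (illustrative only), with
  `probs = pi = [.2, .3, .5]` and `total_count = N = 4`, the pmf at
  `counts = [1., 0, 3]` is `4! / (1! 0! 3!) * .2**1 * .3**0 * .5**3 = 0.1`:

  ```python
  dist = Multinomial(total_count=4., probs=[.2, .3, .5])
  dist.prob([1., 0, 3])  # ==> 0.1
  ```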

  Distribution parameters are automatically broadcast in all functions; see
  examples for details.

  #### Pitfalls

  The number of classes, `K`, must not exceed:
  - the largest integer representable by `self.dtype`, i.e.,
    `2**(mantissa_bits+1)` (IEEE 754),
  - the maximum `Tensor` index, i.e., `2**31-1`.

  In other words,

  ```python
  K <= min(2**31-1, {
    tf.float16: 2**11,
    tf.float32: 2**24,
    tf.float64: 2**53 }[param.dtype])
  ```

  Note: This condition is validated only when `self.validate_args = True`.

  #### Examples

  Create a 3-class distribution in which the 3rd class is the most likely to be
  drawn, using logits.

  ```python
  logits = [-50., -43, 0]
  dist = Multinomial(total_count=4., logits=logits)
  ```

  Create a 3-class distribution in which the 3rd class is the most likely to be
  drawn, this time using probabilities.

  ```python
  p = [.2, .3, .5]
  dist = Multinomial(total_count=4., probs=p)
  ```

  The distribution functions can be evaluated on counts.

  ```python
  # counts same shape as p.
  counts = [1., 0, 3]
  dist.prob(counts)  # Shape []

  # p will be broadcast to [[.2, .3, .5], [.2, .3, .5]] to match counts.
  counts = [[1., 2, 1], [2, 2, 0]]
  dist.prob(counts)  # Shape [2]

  # p will be broadcast to shape [5, 7, 3] to match counts.
  counts = [[...]]  # Shape [5, 7, 3]
  dist.prob(counts)  # Shape [5, 7]
  ```
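
  If the distribution is constructed with `validate_args=True`, counts that do
  not sum to `total_count` are rejected at runtime (a sketch of the expected
  behavior; the exact error text may differ):

  ```python
  dist = Multinomial(total_count=4., probs=p, validate_args=True)
  dist.prob([1., 1, 1])  # ==> raises InvalidArgumentError ("counts must sum
                         #     to `self.total_count`") when evaluated.
  ```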

  Create a 2-batch of 3-class distributions.

  ```python
  p = [[.1, .2, .7], [.3, .3, .4]]  # Shape [2, 3]
  dist = Multinomial(total_count=[4., 5], probs=p)

  counts = [[2., 1, 1], [3, 1, 1]]
  dist.prob(counts)  # Shape [2]

  dist.sample(5)  # Shape [5, 2, 3]
  ```
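
  Draws are count vectors: along the last axis each sample sums to the
  (broadcast) `total_count` (an illustrative sketch; the `seed` argument is
  optional):

  ```python
  x = dist.sample(5, seed=42)  # Shape [5, 2, 3]
  tf.reduce_sum(x, -1)         # ==> [[4., 5.], ..., [4., 5.]], shape [5, 2]
  dist.covariance()            # Shape [2, 3, 3]
  ```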
  """

  @deprecation.deprecated(
      "2019-01-01",
      "The TensorFlow Distributions library has moved to "
      "TensorFlow Probability "
      "(https://github.com/tensorflow/probability). You "
      "should update all references to use `tfp.distributions` "
      "instead of `tf.distributions`.",
      warn_once=True)
  def __init__(self,
               total_count,
               logits=None,
               probs=None,
               validate_args=False,
               allow_nan_stats=True,
               name="Multinomial"):
    """Initialize a batch of Multinomial distributions.

    Args:
      total_count: Non-negative floating point tensor with shape broadcastable
        to `[N1,..., Nm]` with `m >= 0`. Defines this as a batch of
        `N1 x ... x Nm` different Multinomial distributions. Its components
        should be equal to integer values.
      logits: Floating point tensor representing unnormalized log-probabilities
        of a positive event with shape broadcastable to
        `[N1,..., Nm, K]` `m >= 0`, and the same dtype as `total_count`. Defines
        this as a batch of `N1 x ... x Nm` different `K` class Multinomial
        distributions. Only one of `logits` or `probs` should be passed in.
      probs: Positive floating point tensor with shape broadcastable to
        `[N1,..., Nm, K]` `m >= 0` and same dtype as `total_count`. Defines
        this as a batch of `N1 x ... x Nm` different `K` class Multinomial
        distributions. `probs`'s components in the last portion of its shape
        should sum to `1`. Only one of `logits` or `probs` should be passed in.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    parameters = dict(locals())
    with ops.name_scope(name, values=[total_count, logits, probs]) as name:
      self._total_count = ops.convert_to_tensor(total_count, name="total_count")
      if validate_args:
        self._total_count = (
            distribution_util.embed_check_nonnegative_integer_form(
                self._total_count))
      self._logits, self._probs = distribution_util.get_logits_and_probs(
          logits=logits,
          probs=probs,
          multidimensional=True,
          validate_args=validate_args,
          name=name)
      self._mean_val = self._total_count[..., array_ops.newaxis] * self._probs
    super(Multinomial, self).__init__(
        dtype=self._probs.dtype,
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        parameters=parameters,
        graph_parents=[self._total_count, self._logits, self._probs],
        name=name)

  @property
  def total_count(self):
    """Number of trials used to construct a sample."""
    return self._total_count

  @property
  def logits(self):
    """Vector of coordinatewise logits."""
    return self._logits

  @property
  def probs(self):
    """Probability of drawing a `1` in that coordinate."""
    return self._probs

  def _batch_shape_tensor(self):
    return array_ops.shape(self._mean_val)[:-1]

  def _batch_shape(self):
    return self._mean_val.get_shape().with_rank_at_least(1)[:-1]

  def _event_shape_tensor(self):
    return array_ops.shape(self._mean_val)[-1:]

  def _event_shape(self):
    return self._mean_val.get_shape().with_rank_at_least(1)[-1:]

  def _sample_n(self, n, seed=None):
    n_draws = math_ops.cast(self.total_count, dtype=dtypes.int32)
    k = self.event_shape_tensor()[0]

    # Broadcast total_count and logits to a common batch shape.
    n_draws = array_ops.ones_like(
        self.logits[..., 0], dtype=n_draws.dtype) * n_draws
    logits = array_ops.ones_like(
        n_draws[..., array_ops.newaxis], dtype=self.logits.dtype) * self.logits

    # Flatten batch dims so each batch member is handled independently.
    flat_logits = array_ops.reshape(logits, [-1, k])  # [B1B2...Bm, k]
    flat_ndraws = n * array_ops.reshape(n_draws, [-1])  # [B1B2...Bm]

    def _sample_single(args):
      logits, n_draw = args[0], args[1]  # [K], []
      x = random_ops.multinomial(logits[array_ops.newaxis, ...], n_draw,
                                 seed)  # [1, n*n_draw]
      x = array_ops.reshape(x, shape=[n, -1])  # [n, n_draw]
      x = math_ops.reduce_sum(array_ops.one_hot(x, depth=k),
                              axis=-2)  # [n, k]
      return x

    x = map_fn.map_fn(
        _sample_single, [flat_logits, flat_ndraws],
        dtype=self.dtype)  # [B1B2...Bm, n, k]

    # Reshape the results back to sample-major form.
    x = array_ops.transpose(x, perm=[1, 0, 2])
    final_shape = array_ops.concat([[n], self.batch_shape_tensor(), [k]], 0)
    x = array_ops.reshape(x, final_shape)  # [n, B1, B2,..., Bm, k]
    return x

  @distribution_util.AppendDocstring(_multinomial_sample_note)
  def _log_prob(self, counts):
    return self._log_unnormalized_prob(counts) - self._log_normalization(counts)

  def _log_unnormalized_prob(self, counts):
    counts = self._maybe_assert_valid_sample(counts)
    return math_ops.reduce_sum(counts * nn_ops.log_softmax(self.logits), -1)

  def _log_normalization(self, counts):
    counts = self._maybe_assert_valid_sample(counts)
    return -distribution_util.log_combinations(self.total_count, counts)

  def _mean(self):
    return array_ops.identity(self._mean_val)

  def _covariance(self):
    p = self.probs * array_ops.ones_like(
        self.total_count)[..., array_ops.newaxis]
    # Off-diagonal entries are -N * p_i * p_j; the diagonal is the variance.
    return array_ops.matrix_set_diag(
        -math_ops.matmul(self._mean_val[..., array_ops.newaxis],
                         p[..., array_ops.newaxis, :]),  # outer product
        self._variance())

  def _variance(self):
    p = self.probs * array_ops.ones_like(
        self.total_count)[..., array_ops.newaxis]
    return self._mean_val - self._mean_val * p

  def _maybe_assert_valid_sample(self, counts):
    """Check counts for proper shape, values, then return tensor version."""
    if not self.validate_args:
      return counts
    counts = distribution_util.embed_check_nonnegative_integer_form(counts)
    return control_flow_ops.with_dependencies([
        check_ops.assert_equal(
            self.total_count,
            math_ops.reduce_sum(counts, -1),
            message="counts must sum to `self.total_count`"),
    ], counts)