import torch
from torch import Tensor
from torch.distributions.distribution import Distribution

__all__ = ["ExponentialFamily"]


class ExponentialFamily(Distribution):
    r"""
    ExponentialFamily is the abstract base class for probability distributions belonging to an
    exponential family, whose probability mass/density function has the form defined below

    .. math::

        p_{F}(x; \theta) = \exp(\langle t(x), \theta\rangle - F(\theta) + k(x))

    where :math:`\theta` denotes the natural parameters, :math:`t(x)` denotes the sufficient statistic,
    :math:`F(\theta)` is the log normalizer function for a given family and :math:`k(x)` is the carrier
    measure.

    Note:
        This class is an intermediary between the `Distribution` class and distributions which belong
        to an exponential family, mainly to check the correctness of the `.entropy()` and analytic KL
        divergence methods. We use this class to compute the entropy and KL divergence using the AD
        framework and Bregman divergences (courtesy of: Frank Nielsen and Richard Nock, Entropies and
        Cross-entropies of Exponential Families).
    """

    @property
    def _natural_params(self) -> tuple[Tensor, ...]:
        """
        Abstract method for natural parameters. Returns a tuple of Tensors based
        on the distribution.
        """
        raise NotImplementedError

    def _log_normalizer(self, *natural_params):
        """
        Abstract method for the log normalizer function. Returns a log normalizer based on
        the distribution and input.
        """
        raise NotImplementedError

    @property
    def _mean_carrier_measure(self) -> float:
        """
        Abstract method for the expected carrier measure, which is required for computing
        entropy.
        """
        raise NotImplementedError

    def entropy(self):
        """
        Method to compute the entropy using Bregman divergence of the log normalizer.
        """
        result = -self._mean_carrier_measure
        nparams = [p.detach().requires_grad_() for p in self._natural_params]
        lg_normal = self._log_normalizer(*nparams)
        gradients = torch.autograd.grad(lg_normal.sum(), nparams, create_graph=True)
        result += lg_normal
        for np, g in zip(nparams, gradients):
            result -= (np * g).reshape(self._batch_shape + (-1,)).sum(-1)
        return result
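

# ---------------------------------------------------------------------------
# Usage sketch (not part of the upstream module): a minimal, hypothetical
# subclass showing how a concrete distribution plugs into the Bregman-divergence
# `entropy()` above. For an exponential family, H(p) = F(theta)
# - <theta, grad F(theta)> - E[k(x)], which is exactly what `entropy()` computes
# with autograd supplying grad F. The class name `_ExampleExponential` and its
# parameterization are assumptions for illustration only: an Exponential(rate)
# distribution in natural form has theta = -rate, log normalizer
# F(theta) = -log(-theta), and zero carrier measure, so `entropy()` should
# reproduce the closed form 1 - log(rate).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from torch.distributions import constraints

    class _ExampleExponential(ExponentialFamily):
        # Constraint on the user-facing parameter; required for argument validation.
        arg_constraints = {"rate": constraints.positive}

        def __init__(self, rate: Tensor):
            self.rate = rate
            super().__init__(batch_shape=rate.shape)

        @property
        def _natural_params(self) -> tuple[Tensor, ...]:
            # Natural parameter theta = -rate.
            return (-self.rate,)

        def _log_normalizer(self, theta: Tensor) -> Tensor:
            # F(theta) = -log(-theta) = -log(rate).
            return -torch.log(-theta)

        @property
        def _mean_carrier_measure(self) -> float:
            # k(x) = 0 for the Exponential distribution.
            return 0.0

    rate = torch.tensor([0.5, 1.0, 2.0])
    d = _ExampleExponential(rate)
    # Entropy via autograd + Bregman divergence vs. the closed form 1 - log(rate).
    print(d.entropy())
    print(1.0 - torch.log(rate))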