
"""Base class for mixture models."""

import warnings
from abc import ABCMeta, abstractmethod
from numbers import Integral, Real
from time import time

import numpy as np
from scipy.special import logsumexp

from .. import cluster
from ..base import BaseEstimator, DensityMixin, _fit_context
from ..cluster import kmeans_plusplus
from ..exceptions import ConvergenceWarning
from ..utils import check_random_state
from ..utils._param_validation import Interval, StrOptions
from ..utils.validation import check_is_fitted


def _check_shape(param, param_shape, name):
    """Validate the shape of the input parameter 'param'.

    Parameters
    ----------
    param : array

    param_shape : tuple

    name : str
    """
    param = np.array(param)
    if param.shape != param_shape:
        raise ValueError(
            f"The parameter '{name}' should have the shape of "
            f"{param_shape}, but got {param.shape}"
        )


class BaseMixture(DensityMixin, BaseEstimator, metaclass=ABCMeta):
    """Base class for mixture models.

    This abstract class specifies an interface for all mixture classes and
    provides basic common methods for mixture models.
    """

    _parameter_constraints: dict = {
        "n_components": [Interval(Integral, 1, None, closed="left")],
        "tol": [Interval(Real, 0.0, None, closed="left")],
        "reg_covar": [Interval(Real, 0.0, None, closed="left")],
        "max_iter": [Interval(Integral, 0, None, closed="left")],
        "n_init": [Interval(Integral, 1, None, closed="left")],
        "init_params": [
            StrOptions({"kmeans", "k-means++", "random", "random_from_data"})
        ],
        "random_state": ["random_state"],
        "warm_start": ["boolean"],
        "verbose": ["verbose"],
        "verbose_interval": [Interval(Integral, 1, None, closed="left")],
    }

    def __init__(
        self,
        n_components,
        tol,
        reg_covar,
        max_iter,
        n_init,
        init_params,
        random_state,
        warm_start,
        verbose,
        verbose_interval,
    ):
        self.n_components = n_components
        self.tol = tol
        self.reg_covar = reg_covar
        self.max_iter = max_iter
        self.n_init = n_init
        self.init_params = init_params
        self.random_state = random_state
        self.warm_start = warm_start
        self.verbose = verbose
        self.verbose_interval = verbose_interval

    @abstractmethod
    def _check_parameters(self, X):
        """Check initial parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)
        """

    def _initialize_parameters(self, X, random_state):
        """Initialize the model parameters.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        random_state : RandomState
            A random number generator instance that controls the random seed
            used for the method chosen to initialize the parameters.
        """
        n_samples, _ = X.shape

        if self.init_params == "kmeans":
            resp = np.zeros((n_samples, self.n_components))
            label = (
                cluster.KMeans(
                    n_clusters=self.n_components, n_init=1, random_state=random_state
                )
                .fit(X)
                .labels_
            )
            resp[np.arange(n_samples), label] = 1
        elif self.init_params == "random":
            resp = random_state.uniform(size=(n_samples, self.n_components))
            resp /= resp.sum(axis=1)[:, np.newaxis]
        elif self.init_params == "random_from_data":
            resp = np.zeros((n_samples, self.n_components))
            indices = random_state.choice(
                n_samples, size=self.n_components, replace=False
            )
            resp[indices, np.arange(self.n_components)] = 1
        elif self.init_params == "k-means++":
            resp = np.zeros((n_samples, self.n_components))
            _, indices = kmeans_plusplus(
                X,
                self.n_components,
                random_state=random_state,
            )
            resp[indices, np.arange(self.n_components)] = 1

        self._initialize(X, resp)

    @abstractmethod
    def _initialize(self, X, resp):
        """Initialize the model parameters of the derived class.

        Parameters
        ----------
        X : array-like of shape  (n_samples, n_features)

        resp : array-like of shape (n_samples, n_components)
        """

    def fit(self, X, y=None):
        """Estimate model parameters with the EM algorithm.

        The method fits the model ``n_init`` times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for ``max_iter``
        times until the change of likelihood or lower bound is less than
        ``tol``, otherwise, a ``ConvergenceWarning`` is raised.
        If ``warm_start`` is ``True``, then ``n_init`` is ignored and a single
        initialization is performed upon the first call. Upon consecutive
        calls, training starts where it left off.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        self : object
            The fitted mixture.
        """
        # Input validation and the actual EM loop happen in fit_predict.
        self.fit_predict(X, y)
        return self

    @_fit_context(prefer_skip_nested_validation=True)
    def fit_predict(self, X, y=None):
        """Estimate model parameters using X and predict the labels for X.

        The method fits the model n_init times and sets the parameters with
        which the model has the largest likelihood or lower bound. Within each
        trial, the method iterates between E-step and M-step for `max_iter`
        times until the change of likelihood or lower bound is less than
        `tol`, otherwise, a :class:`~sklearn.exceptions.ConvergenceWarning` is
        raised. After fitting, it predicts the most probable label for the
        input data points.

        .. versionadded:: 0.20

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        X = self._validate_data(
            X, dtype=[np.float64, np.float32], ensure_min_samples=2
        )
        if X.shape[0] < self.n_components:
            raise ValueError(
                "Expected n_samples >= n_components "
                f"but got n_components = {self.n_components}, "
                f"n_samples = {X.shape[0]}"
            )
        self._check_parameters(X)

        # With warm_start enabled, a single initialization is reused across calls.
        do_init = not (self.warm_start and hasattr(self, "converged_"))
        n_init = self.n_init if do_init else 1

        max_lower_bound = -np.inf
        self.converged_ = False

        random_state = check_random_state(self.random_state)

        n_samples, _ = X.shape
        for init in range(n_init):
            self._print_verbose_msg_init_beg(init)

            if do_init:
                self._initialize_parameters(X, random_state)

            lower_bound = -np.inf if do_init else self.lower_bound_

            if self.max_iter == 0:
                best_params = self._get_parameters()
                best_n_iter = 0
            else:
                converged = False
                for n_iter in range(1, self.max_iter + 1):
                    prev_lower_bound = lower_bound

                    log_prob_norm, log_resp = self._e_step(X)
                    self._m_step(X, log_resp)
                    lower_bound = self._compute_lower_bound(log_resp, log_prob_norm)

                    change = lower_bound - prev_lower_bound
                    self._print_verbose_msg_iter_end(n_iter, change)

                    if abs(change) < self.tol:
                        converged = True
                        break

                self._print_verbose_msg_init_end(lower_bound, converged)

                if lower_bound > max_lower_bound or max_lower_bound == -np.inf:
                    max_lower_bound = lower_bound
                    best_params = self._get_parameters()
                    best_n_iter = n_iter
                    self.converged_ = converged

        # Only warn about convergence when max_iter > 0; with max_iter == 0 the
        # user is assumed to want the initialization only.
        if not self.converged_ and self.max_iter > 0:
            warnings.warn(
                "Best performing initialization did not converge. "
                "Try different init parameters, or increase max_iter, "
                "tol, or check for degenerate data.",
                ConvergenceWarning,
            )

        self._set_parameters(best_params)
        self.n_iter_ = best_n_iter
        self.lower_bound_ = max_lower_bound

        # Always do a final e-step so that the labels returned by fit_predict(X)
        # are consistent with fit(X).predict(X) for any max_iter and tol.
        _, log_resp = self._e_step(X)

        return log_resp.argmax(axis=1)

    def _e_step(self, X):
        """E step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : float
            Mean of the logarithms of the probabilities of each sample in X

        log_responsibility : array, shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """
        log_prob_norm, log_resp = self._estimate_log_prob_resp(X)
        return np.mean(log_prob_norm), log_resp

    @abstractmethod
    def _m_step(self, X, log_resp):
        """M step.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        log_resp : array-like of shape (n_samples, n_components)
            Logarithm of the posterior probabilities (or responsibilities) of
            the point of each sample in X.
        """

    @abstractmethod
    def _get_parameters(self):
        pass

    @abstractmethod
    def _set_parameters(self, params):
        pass

    def score_samples(self, X):
        """Compute the log-likelihood of each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        log_prob : array, shape (n_samples,)
            Log-likelihood of each sample in `X` under the current model.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)

        return logsumexp(self._estimate_weighted_log_prob(X), axis=1)

    def score(self, X, y=None):
        """Compute the per-sample average log-likelihood of the given data X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_dimensions)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        y : Ignored
            Not used, present for API consistency by convention.

        Returns
        -------
        log_likelihood : float
            Log-likelihood of `X` under the Gaussian mixture model.
        """
        return self.score_samples(X).mean()

    def predict(self, X):
        """Predict the labels for the data samples in X using trained model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        labels : array, shape (n_samples,)
            Component labels.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)
        return self._estimate_weighted_log_prob(X).argmax(axis=1)

    def predict_proba(self, X):
        """Evaluate the components' density for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            List of n_features-dimensional data points. Each row
            corresponds to a single data point.

        Returns
        -------
        resp : array, shape (n_samples, n_components)
            Density of each Gaussian component for each sample in X.
        """
        check_is_fitted(self)
        X = self._validate_data(X, reset=False)
        _, log_resp = self._estimate_log_prob_resp(X)
        return np.exp(log_resp)

    def sample(self, n_samples=1):
        """Generate random samples from the fitted Gaussian distribution.

        Parameters
        ----------
        n_samples : int, default=1
            Number of samples to generate.

        Returns
        -------
        X : array, shape (n_samples, n_features)
            Randomly generated sample.

        y : array, shape (nsamples,)
            Component labels.
        """
        check_is_fitted(self)

        if n_samples < 1:
            raise ValueError(
                "Invalid value for 'n_samples': %d . The sampling requires at "
                "least one sample." % (n_samples)
            )

        _, n_features = self.means_.shape
        rng = check_random_state(self.random_state)
        n_samples_comp = rng.multinomial(n_samples, self.weights_)

        if self.covariance_type == "full":
            X = np.vstack(
                [
                    rng.multivariate_normal(mean, covariance, int(sample))
                    for (mean, covariance, sample) in zip(
                        self.means_, self.covariances_, n_samples_comp
                    )
                ]
            )
        elif self.covariance_type == "tied":
            X = np.vstack(
                [
                    rng.multivariate_normal(mean, self.covariances_, int(sample))
                    for (mean, sample) in zip(self.means_, n_samples_comp)
                ]
            )
        else:
            X = np.vstack(
                [
                    mean
                    + rng.standard_normal(size=(sample, n_features))
                    * np.sqrt(covariance)
                    for (mean, covariance, sample) in zip(
                        self.means_, self.covariances_, n_samples_comp
                    )
                ]
            )

        y = np.concatenate(
            [np.full(sample, j, dtype=int) for j, sample in enumerate(n_samples_comp)]
        )

        return (X, y)

    def _estimate_weighted_log_prob(self, X):
        """Estimate the weighted log-probabilities, log P(X | Z) + log weights.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        weighted_log_prob : array, shape (n_samples, n_component)
        """
        return self._estimate_log_prob(X) + self._estimate_log_weights()

    @abstractmethod
    def _estimate_log_weights(self):
        """Estimate log-weights in EM algorithm, E[ log pi ] in VB algorithm.

        Returns
        -------
        log_weight : array, shape (n_components, )
        """

    @abstractmethod
    def _estimate_log_prob(self, X):
        """Estimate the log-probabilities log P(X | Z).

        Compute the log-probabilities per each component for each sample.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob : array, shape (n_samples, n_component)
        """

    def _estimate_log_prob_resp(self, X):
        """Estimate log probabilities and responsibilities for each sample.

        Compute the log probabilities, weighted log probabilities per
        component and responsibilities for each sample in X with respect to
        the current state of the model.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)

        Returns
        -------
        log_prob_norm : array, shape (n_samples,)
            log p(X)

        log_responsibilities : array, shape (n_samples, n_components)
            logarithm of the responsibilities
        """
        weighted_log_prob = self._estimate_weighted_log_prob(X)
        log_prob_norm = logsumexp(weighted_log_prob, axis=1)
        with np.errstate(under="ignore"):
            # Ignore underflow: very small responsibilities may round to -inf.
            log_resp = weighted_log_prob - log_prob_norm[:, np.newaxis]
        return log_prob_norm, log_resp

    def _print_verbose_msg_init_beg(self, n_init):
        """Print verbose message on initialization."""
        if self.verbose == 1:
            print("Initialization %d" % n_init)
        elif self.verbose >= 2:
            print("Initialization %d" % n_init)
            self._init_prev_time = time()
            self._iter_prev_time = self._init_prev_time

    def _print_verbose_msg_iter_end(self, n_iter, diff_ll):
        """Print verbose message on iteration."""
        if n_iter % self.verbose_interval == 0:
            if self.verbose == 1:
                print("  Iteration %d" % n_iter)
            elif self.verbose >= 2:
                cur_time = time()
                print(
                    "  Iteration %d\t time lapse %.5fs\t ll change %.5f"
                    % (n_iter, cur_time - self._iter_prev_time, diff_ll)
                )
                self._iter_prev_time = cur_time

    def _print_verbose_msg_init_end(self, lb, init_has_converged):
        """Print verbose message on the end of iteration."""
        converged_msg = "converged" if init_has_converged else "did not converge"
        if self.verbose == 1:
            print(f"Initialization {converged_msg}.")
        elif self.verbose >= 2:
            t = time() - self._init_prev_time
            print(
                f"Initialization {converged_msg}. time lapse {t:.5f}s\t lower bound"
                f" {lb:.5f}."
            )
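

if __name__ == "__main__":
    # Illustrative usage sketch, not part of the original module: ``BaseMixture``
    # is abstract, so this demo exercises the fit / predict / score / sample API
    # defined above through ``GaussianMixture``, its concrete scikit-learn
    # subclass. The demo data (two separated blobs) and the variable names below
    # are made up for illustration only.
    from sklearn.mixture import GaussianMixture

    rng_demo = np.random.RandomState(0)
    X_demo = np.vstack(
        [
            rng_demo.standard_normal((100, 2)),        # blob centred near (0, 0)
            rng_demo.standard_normal((100, 2)) + 6.0,  # blob centred near (6, 6)
        ]
    )

    gm = GaussianMixture(n_components=2, random_state=0).fit(X_demo)
    print("hard labels:", gm.predict(X_demo[:5]))        # argmax responsibility
    print("responsibilities:", gm.predict_proba(X_demo[:2]))
    print("mean log-likelihood:", gm.score(X_demo))      # average of score_samples
    X_new, y_new = gm.sample(5)                          # draw from the fitted model
    print("sampled points:", X_new.shape, "component labels:", y_new)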