
    -i N                        S SK Jr  S SKrS SKJr  S SKJr  SSKJ	r	  SSK
JrJr  SSKJr  SS	KJr  SS
KJrJrJr  SSKJr  SSKJrJr  SSKJrJr  S rS rS$S jrS%S jr SSSSSSS.S jr!\" SS/S/\" S15      SS/\" \SSSS9/S/S/\S/S.SS 9SSSSSS!.S" j5       r"\" SS/S/\" S15      SS/\" \SSSS9/S/S/\S/S.SS 9SSSSSS!.S# j5       r#g)&    )IntegralN)issparse)digamma   )mutual_info_score)KDTreeNearestNeighbors)scale)check_random_state)Interval
StrOptionsvalidate_params)check_classification_targets)Paralleldelayed)check_array	check_X_yc                    U R                   nU R                  S5      n UR                  S5      n[        R                  " X45      n[	        SUS9nUR                  U5        UR                  5       S   n[        R                  " USS2S4   S5      n[        U SS9nUR                  XSS	S
9n[        R                  " U5      S-
  n[        USS9nUR                  XSS	S
9n	[        R                  " U	5      S-
  n	[        U5      [        U5      -   [        R                  " [        US-   5      5      -
  [        R                  " [        U	S-   5      5      -
  n
[        SU
5      $ )a  Compute mutual information between two continuous variables.

Parameters
----------
x, y : ndarray, shape (n_samples,)
    Samples of two continuous random variables, must have an identical
    shape.

n_neighbors : int
    Number of nearest neighbors to search for each point, see [1]_.

Returns
-------
mi : float
    Estimated mutual information in nat units. If the estimate turns out
    to be negative, it is replaced by 0.

Notes
-----
True mutual information can't be negative. If its estimate by a numerical
method is negative, it means (providing the method is adequate) that the
mutual information is close to 0 and replacing it by 0 is a reasonable
strategy.
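
Concretely, the quantity evaluated below is the first (neighbor-counting)
estimator proposed in [1]_; in the notation of the code it reads::

    mi = psi(n_samples) + psi(n_neighbors)
         - mean(psi(nx + 1)) - mean(psi(ny + 1))

where ``psi`` is the digamma function and ``nx``, ``ny`` count, for each
point, the neighbors falling within its Chebyshev k-NN radius along each
marginal.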

References
----------
.. [1] A. Kraskov, H. Stogbauer and P. Grassberger, "Estimating mutual
       information". Phys. Rev. E 69, 2004.
   	chebyshev)metricn_neighborsr   Nr   )r   TF
count_onlyreturn_distanceg      ?r   )sizereshapenphstackr	   fit
kneighbors	nextafterr   query_radiusarrayr   meanmax)xyr   	n_samplesxynnradiuskdnxnymis              Y/var/www/html/venv/lib/python3.13/site-packages/sklearn/feature_selection/_mutual_info.py_compute_mi_ccr4      s>   < I			'A			'A	A6	B 
+	FBFF2J]]_QF\\&B-+F 
+	&B	tU	KB	"	B	+	&B	tU	KB	"	B 		
+
	
'''"q&/
"	# '''"q&/
"	#  q":    c                    U R                   S   nU R                  S5      n [        R                  " U5      n[        R                  " U5      n[        R                  " U5      n[	        5       n[        R
                  " U5       H  nX:H  n	[        R                  " U	5      n
U
S:  ag  [        X*S-
  5      nUR                  US9  UR                  X	   5        UR                  5       S   n[        R                  " USS2S4   S5      XI'   XU	'   XU	'   M     US:  n	[        R                  " U	5      nXY   nXi   nX	   n XI   n[        U 5      nUR                  XSSS	9n[        R                  " U5      n[        U5      [        R                   " [        U5      5      -   [        R                   " [        U5      5      -
  [        R                   " [        U5      5      -
  n[#        SU5      $ )
aC  Compute mutual information between continuous and discrete variables.

Parameters
----------
c : ndarray, shape (n_samples,)
    Samples of a continuous random variable.

d : ndarray, shape (n_samples,)
    Samples of a discrete random variable.

n_neighbors : int
    Number of nearest neighbors to search for each point, see [1]_.

Returns
-------
mi : float
    Estimated mutual information in nat units. If the estimate turns out
    to be negative, it is replaced by 0.

Notes
-----
True mutual information can't be negative. If its estimate by a numerical
method is negative, it means (providing the method is adequate) that the
mutual information is close to 0 and replacing it by 0 is a reasonable
strategy.
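
Concretely, the quantity evaluated below is the estimator proposed in
[1]_; in the notation of the code it reads::

    mi = psi(n_samples) + mean(psi(k_all))
         - mean(psi(label_counts)) - mean(psi(m_all))

where ``psi`` is the digamma function, ``k_all`` holds the per-point
neighbor counts within the point's own label, ``label_counts`` the label
frequencies, and ``m_all`` the neighbor counts over the full sample within
the same radius.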

References
----------
.. [1] B. C. Ross "Mutual Information between Discrete and Continuous
   Data Sets". PLoS ONE 9(2), 2014.
r   r   r   )r   Nr   TFr   )shaper   r    emptyr	   uniquesummin
set_paramsr"   r#   r$   r   r%   r&   r   r'   r(   )cdr   r+   r.   label_countsk_allr-   labelmaskcountkrr/   m_allr2   s                   r3   _compute_mi_cdrG   S   s   @ 
I			'AXXi F88I&LHHYE		B1zt19K+AMMaM(FF17O"A<<!R%!4FL$K"T  !DtI%LKE	A\F	BOOA$ONEHHUOE 		
'''%.
!	"
''','
(	) '''%.
!	"  q":r5      c                     U(       a  U(       a  [        X5      $ U(       a  U(       d  [        XU5      $ U(       d  U(       a  [        XU5      $ [        XU5      $ )zCompute mutual information between two variables.

This is a simple wrapper which selects a proper function to call based on
whether `x` and `y` are discrete or not.
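
Note that mutual information is symmetric, so the mixed case is handled by
``_compute_mi_cd`` with the arguments swapped when `x` is the discrete
variable.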
    """
    if x_discrete and y_discrete:
        return mutual_info_score(x, y)
    elif x_discrete and not y_discrete:
        return _compute_mi_cd(y, x, n_neighbors)
    elif not x_discrete and y_discrete:
        return _compute_mi_cd(x, y, n_neighbors)
    else:
        return _compute_mi_cc(x, y, n_neighbors)


def _iterate_columns(X, columns=None):
    """Iterate over columns of a matrix.

Parameters
----------
X : ndarray or csc_matrix, shape (n_samples, n_features)
    Matrix over which to iterate.

columns : iterable or None, default=None
    Indices of columns to iterate over. If None, iterate over all columns.

Yields
------
x : ndarray, shape (n_samples,)
    Columns of `X` in dense format.
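
Examples
--------
A minimal sketch of the dense path (a CSC input yields the same columns,
materialized from its ``indptr``/``indices``/``data`` arrays):

>>> import numpy as np
>>> X_dense = np.array([[0.0, 1.0], [2.0, 3.0]])
>>> [col.tolist() for col in _iterate_columns(X_dense)]
[[0.0, 2.0], [1.0, 3.0]]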
    """
    if columns is None:
        columns = range(X.shape[1])

    if issparse(X):
        for i in columns:
            x = np.zeros(X.shape[0])
            start_ptr, end_ptr = X.indptr[i], X.indptr[i + 1]
            x[X.indices[start_ptr:end_ptr]] = X.data[start_ptr:end_ptr]
            yield x
    else:
        for i in columns:
            yield X[:, i]


def _estimate_mi(
    X,
    y,
    *,
    discrete_features="auto",
    discrete_target=False,
    n_neighbors=3,
    copy=True,
    random_state=None,
    n_jobs=None,
):
    """Estimate mutual information between the features and the target.

Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
    Feature matrix.

y : array-like of shape (n_samples,)
    Target vector.

discrete_features : {'auto', bool, array-like}, default='auto'
    If bool, then determines whether to consider all features discrete
    or continuous. If array, then it should be either a boolean mask
    with shape (n_features,) or array with indices of discrete features.
    If 'auto', it is assigned to False for dense `X` and to True for
    sparse `X`.

discrete_target : bool, default=False
    Whether to consider `y` as a discrete variable.

n_neighbors : int, default=3
    Number of neighbors to use for MI estimation for continuous variables,
    see [1]_ and [2]_. Higher values reduce variance of the estimation, but
    could introduce a bias.

copy : bool, default=True
    Whether to make a copy of the given data. If set to False, the initial
    data will be overwritten.

random_state : int, RandomState instance or None, default=None
    Determines random number generation for adding small noise to
    continuous variables in order to remove repeated values.
    Pass an int for reproducible results across multiple function calls.
    See :term:`Glossary <random_state>`.

n_jobs : int, default=None
    The number of jobs to use for computing the mutual information.
    The parallelization is done on the columns of `X`.
    ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
    ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
    for more details.

    .. versionadded:: 1.5


Returns
-------
mi : ndarray, shape (n_features,)
    Estimated mutual information between each feature and the target in
    nat units. A negative value will be replaced by 0.

References
----------
.. [1] A. Kraskov, H. Stogbauer and P. Grassberger, "Estimating mutual
       information". Phys. Rev. E 69, 2004.
.. [2] B. C. Ross "Mutual Information between Discrete and Continuous
       Data Sets". PLoS ONE 9(2), 2014.
csc)accept_sparse	y_numericrY   z+Invalid string value for discrete_features.)dtypeF)	ensure_2dboolTz1Sparse matrix `X` can't have continuous features.)r]   N)	with_meanr]   r   r   )axisg|=)r   )rg   )r_   c              3   Z   >#    U  H   u  p[        [        5      " UTUTT5      v   M"     g 7fN)r   rL   ).0r)   discrete_featurer\   r   r*   s      r3   	<genexpr>_estimate_mi.<locals>.<genexpr>=  s1      !#JA 	Q#3_kRR#Js   (+)r   r7   
isinstancestrrf   r   
ValueErrorr    r8   fillr   rd   rO   anyr   astypefloat64r
   maximumr'   absstandard_normalr:   r   ziprX   r&   )rS   r*   r[   r\   r   r]   r^   r_   r+   
n_featuresdiscrete_maskcontinuous_maskrngmeansr2   s    ` ``          r3   _estimate_mir      sE   J Qo:MNDAqGGI#c4[11'-- F*$,QK! !NOO48,-'(9UK""f,HHZt<M/3M,-M$nO	vvo8A;;LMM
\
*C	vvoHHRZZdH+ %a !U!
!_

 

1bggbffQq//A-B&C!LM	!_
!!	266/3J'K!LM	
 !u%	jjBGGBFF1I./0!!y!12	
 
	  !#&'7':M#J! 
B
 88B<r5   z
array-likezsparse matrixbooleanr   left)closedr^   rS   r*   r[   r   r]   r^   r_   )prefer_skip_nested_validation)r[   r   r]   r^   r_   c                "    [        U UUSUUUUS9$ )aF  Estimate mutual information for a continuous target variable.

Mutual information (MI) [1]_ between two random variables is a non-negative
value, which measures the dependency between the variables. It is equal
to zero if and only if two random variables are independent, and higher
values mean higher dependency.

The function relies on nonparametric methods based on entropy estimation
from k-nearest neighbors distances as described in [2]_ and [3]_. Both
methods are based on the idea originally proposed in [4]_.

It can be used for univariate feature selection, read more in the
:ref:`User Guide <univariate_feature_selection>`.

Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
    Feature matrix.

y : array-like of shape (n_samples,)
    Target vector.

discrete_features : {'auto', bool, array-like}, default='auto'
    If bool, then determines whether to consider all features discrete
    or continuous. If array, then it should be either a boolean mask
    with shape (n_features,) or array with indices of discrete features.
    If 'auto', it is assigned to False for dense `X` and to True for
    sparse `X`.

n_neighbors : int, default=3
    Number of neighbors to use for MI estimation for continuous variables,
    see [2]_ and [3]_. Higher values reduce variance of the estimation, but
    could introduce a bias.

copy : bool, default=True
    Whether to make a copy of the given data. If set to False, the initial
    data will be overwritten.

random_state : int, RandomState instance or None, default=None
    Determines random number generation for adding small noise to
    continuous variables in order to remove repeated values.
    Pass an int for reproducible results across multiple function calls.
    See :term:`Glossary <random_state>`.

n_jobs : int, default=None
    The number of jobs to use for computing the mutual information.
    The parallelization is done on the columns of `X`.

    ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
    ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
    for more details.

    .. versionadded:: 1.5

Returns
-------
mi : ndarray, shape (n_features,)
    Estimated mutual information between each feature and the target in
    nat units.

Notes
-----
1. The term "discrete features" is used instead of naming them
   "categorical", because it describes the essence more accurately.
   For example, pixel intensities of an image are discrete features
   (but hardly categorical) and you will get better results if you mark
   them as such. Also note that treating a continuous variable as discrete and
   vice versa will usually give incorrect results, so be attentive about
   that.
2. True mutual information can't be negative. If its estimate turns out
   to be negative, it is replaced by zero.

References
----------
.. [1] `Mutual Information
       <https://en.wikipedia.org/wiki/Mutual_information>`_
       on Wikipedia.
.. [2] A. Kraskov, H. Stogbauer and P. Grassberger, "Estimating mutual
       information". Phys. Rev. E 69, 2004.
.. [3] B. C. Ross "Mutual Information between Discrete and Continuous
       Data Sets". PLoS ONE 9(2), 2014.
.. [4] L. F. Kozachenko, N. N. Leonenko, "Sample Estimate of the Entropy
       of a Random Vector", Probl. Peredachi Inf., 23:2 (1987), 9-16

Examples
--------
>>> from sklearn.datasets import make_regression
>>> from sklearn.feature_selection import mutual_info_regression
>>> X, y = make_regression(
...     n_samples=50, n_features=3, n_informative=1, noise=1e-4, random_state=42
... )
>>> mutual_info_regression(X, y)
array([0.117, 2.645, 0.0287])
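
The scores can be plugged into univariate feature selection;
``SelectKBest`` is shown here as one possible consumer:

>>> from sklearn.feature_selection import SelectKBest
>>> SelectKBest(mutual_info_regression, k=2).fit_transform(X, y).shape
(50, 2)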
FrZ   )r   r   s          r3   mutual_info_regressionr   E  s*    h 		+!	 	r5   c                8    [        U5        [        U UUSUUUUS9$ )a  Estimate mutual information for a discrete target variable.

Mutual information (MI) [1]_ between two random variables is a non-negative
value, which measures the dependency between the variables. It is equal
to zero if and only if two random variables are independent, and higher
values mean higher dependency.

The function relies on nonparametric methods based on entropy estimation
from k-nearest neighbors distances as described in [2]_ and [3]_. Both
methods are based on the idea originally proposed in [4]_.

It can be used for univariate feature selection, read more in the
:ref:`User Guide <univariate_feature_selection>`.

Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
    Feature matrix.

y : array-like of shape (n_samples,)
    Target vector.

discrete_features : 'auto', bool or array-like, default='auto'
    If bool, then determines whether to consider all features discrete
    or continuous. If array, then it should be either a boolean mask
    with shape (n_features,) or array with indices of discrete features.
    If 'auto', it is assigned to False for dense `X` and to True for
    sparse `X`.

n_neighbors : int, default=3
    Number of neighbors to use for MI estimation for continuous variables,
    see [2]_ and [3]_. Higher values reduce variance of the estimation, but
    could introduce a bias.

copy : bool, default=True
    Whether to make a copy of the given data. If set to False, the initial
    data will be overwritten.

random_state : int, RandomState instance or None, default=None
    Determines random number generation for adding small noise to
    continuous variables in order to remove repeated values.
    Pass an int for reproducible results across multiple function calls.
    See :term:`Glossary <random_state>`.

n_jobs : int, default=None
    The number of jobs to use for computing the mutual information.
    The parallelization is done on the columns of `X`.
    ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
    ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
    for more details.

    .. versionadded:: 1.5

Returns
-------
mi : ndarray, shape (n_features,)
    Estimated mutual information between each feature and the target in
    nat units.

Notes
-----
1. The term "discrete features" is used instead of naming them
   "categorical", because it describes the essence more accurately.
   For example, pixel intensities of an image are discrete features
   (but hardly categorical) and you will get better results if you mark
   them as such. Also note that treating a continuous variable as discrete and
   vice versa will usually give incorrect results, so be attentive about
   that.
2. True mutual information can't be negative. If its estimate turns out
   to be negative, it is replaced by zero.

References
----------
.. [1] `Mutual Information
       <https://en.wikipedia.org/wiki/Mutual_information>`_
       on Wikipedia.
.. [2] A. Kraskov, H. Stogbauer and P. Grassberger, "Estimating mutual
       information". Phys. Rev. E 69, 2004.
.. [3] B. C. Ross "Mutual Information between Discrete and Continuous
       Data Sets". PLoS ONE 9(2), 2014.
.. [4] L. F. Kozachenko, N. N. Leonenko, "Sample Estimate of the Entropy
       of a Random Vector", Probl. Peredachi Inf., 23:2 (1987), 9-16

Examples
--------
>>> from sklearn.datasets import make_classification
>>> from sklearn.feature_selection import mutual_info_classif
>>> X, y = make_classification(
...     n_samples=100, n_features=10, n_informative=2, n_clusters_per_class=1,
...     shuffle=False, random_state=42
... )
>>> mutual_info_classif(X, y)
array([0.589, 0.107, 0.196, 0.0968 , 0.,
       0.   , 0.   , 0.   , 0.     , 0.])
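
The scores can be plugged into univariate feature selection;
``SelectKBest`` is shown here as one possible consumer:

>>> from sklearn.feature_selection import SelectKBest
>>> SelectKBest(mutual_info_classif, k=2).fit_transform(X, y).shape
(100, 2)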
TrZ   )r   r   r   s          r3   mutual_info_classifr     s2    j !#		+!	 	r5   )rH   rj   )$numbersr   numpyr    scipy.sparser   scipy.specialr   metrics.clusterr   	neighborsr   r	   preprocessingr
   utilsr   utils._param_validationr   r   r   utils.multiclassr   utils.parallelr   r   utils.validationr   r   r4   rG   rL   rX   r   r   r    r5   r3   <module>r      sR     ! ! / 0 ! & K K ; . 5<~FR1 D 	xv O,^(&2I|L 1d6BC'(T" #'  	qqh O,^(&2I|L 1d6BC'(T" #'  	ssr5   