
from numbers import Real

import numpy as np

from ..base import BaseEstimator, _fit_context
from ..utils._param_validation import Interval
from ..utils.sparsefuncs import mean_variance_axis, min_max_axis
from ..utils.validation import check_is_fitted, validate_data
from ._base import SelectorMixin


class VarianceThreshold(SelectorMixin, BaseEstimator):
    """Feature selector that removes all low-variance features.

This feature selection algorithm looks only at the features (X), not the
desired outputs (y), and can thus be used for unsupervised learning.

Read more in the :ref:`User Guide <variance_threshold>`.

Parameters
----------
threshold : float, default=0
    Features with a training-set variance lower than this threshold will
    be removed. The default is to keep all features with non-zero variance,
    i.e. remove the features that have the same value in all samples.

Attributes
----------
variances_ : array, shape (n_features,)
    Variances of individual features.

n_features_in_ : int
    Number of features seen during :term:`fit`.

    .. versionadded:: 0.24

feature_names_in_ : ndarray of shape (`n_features_in_`,)
    Names of features seen during :term:`fit`. Defined only when `X`
    has feature names that are all strings.

    .. versionadded:: 1.0

See Also
--------
SelectFromModel: Meta-transformer for selecting features based on
    importance weights.
SelectPercentile : Select features according to a percentile of the highest
    scores.
SequentialFeatureSelector : Transformer that performs Sequential Feature
    Selection.

Notes
-----
Allows NaN in the input.
Raises ValueError if no feature in X meets the variance threshold.
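
For dense input, the variance of a column containing NaN is computed from its
non-NaN entries (via ``np.nanvar``), so such a column can still be selected.
A small illustration with made-up data: the first column is kept because its
non-NaN entries vary, while the constant second column is dropped::

    >>> import numpy as np
    >>> from sklearn.feature_selection import VarianceThreshold
    >>> X_nan = [[0.0, 1.0], [np.nan, 1.0], [3.0, 1.0]]
    >>> VarianceThreshold().fit(X_nan).get_support()
    array([ True, False])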

Examples
--------
The following dataset has integer features, two of which are the same
in every sample. These are removed with the default setting for threshold::

    >>> from sklearn.feature_selection import VarianceThreshold
    >>> X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
    >>> selector = VarianceThreshold()
    >>> selector.fit_transform(X)
    array([[2, 0],
           [1, 4],
           [1, 1]])
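
A feature is kept only when its training-set variance is strictly greater than
``threshold``, so a non-zero threshold removes additional columns. For the same
data (column variances are about 0, 0.22, 2.89 and 0), only the third column
survives ``threshold=1.0``::

    >>> selector = VarianceThreshold(threshold=1.0)
    >>> selector.fit_transform(X)
    array([[0],
           [4],
           [1]])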
    """

    _parameter_constraints: dict = {
        "threshold": [Interval(Real, 0, None, closed="left")]
    }

    def __init__(self, threshold=0.0):
        self.threshold = threshold

    @_fit_context(prefer_skip_nested_validation=True)
    def fit(self, X, y=None):
        """Learn empirical variances from X.

Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
    Data from which to compute variances, where `n_samples` is
    the number of samples and `n_features` is the number of features.

y : any, default=None
    Ignored. This parameter exists only for compatibility with
    sklearn.pipeline.Pipeline.

Returns
-------
self : object
    Returns the instance itself.
)csrcscz	allow-nan)accept_sparsedtypeensure_all_finitetoarrayr   )axisz4No feature in X meets the variance threshold {0:.5f}r   z (X contains only one sample))r   npfloat64hasattrr   
variances_r   r	   nanvarptparraynanminallisfiniteshape
ValueErrorformat)	r   Xy_minsmaxespeak_to_peakscompare_arrmsgs	            r   fitVarianceThreshold.fitQ   s5   & (**)
 1i  !3AA!>A~~"*115 % ii2DO~~" "qq 1>>Q ((DOO]#CDK ii!<DO662;;t//4??dnn3TUVVHCwwqzQ66SZZ788r   c                 J    [        U 5        U R                  U R                  :  $ r   )r
   r(   r   )r   s    r   _get_support_mask#VarianceThreshold._get_support_mask   s    //r   c                 h   > [         TU ]  5       nSUR                  l        SUR                  l        U$ )NT)super__sklearn_tags__
input_tags	allow_nansparse)r   tags	__class__s     r   rA   "VarianceThreshold.__sklearn_tags__   s-    w')$(!!%r   )r   r(   )g        r   )__name__
__module____qualname____firstlineno____doc__r   r   r   dict__annotations__r   r   r:   r=   rA   __static_attributes____classcell__)rF   s   @r   r   r      s[    8v 	htQV<=$D # 50 60d0
 r   r   )numbersr   numpyr%   baser   r   utils._param_validationr   utils.sparsefuncsr   r	   utils.validationr
   r   _baser   r    r   r   <module>rY      s,      . . @ =  ~} ~r   