
"""Principal Component Analysis Base Classes"""

from abc import ABCMeta, abstractmethod

import numpy as np
from scipy import linalg

from ..base import BaseEstimator, ClassNamePrefixFeaturesOutMixin, TransformerMixin
from ..utils._array_api import _fill_or_add_to_diagonal, device, get_namespace
from ..utils.validation import check_is_fitted, validate_data


class _BasePCA(
    ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimator, metaclass=ABCMeta
):
    """Base class for PCA methods.

    Warning: This class should not be used directly.
    Use derived classes instead.
    """

    def get_covariance(self):
        """Compute data covariance with the generative model.

        ``cov = components_.T * S**2 * components_ + sigma2 * eye(n_features)``
        where S**2 contains the explained variances, and sigma2 contains the
        noise variances.

        Returns
        -------
        cov : array of shape=(n_features, n_features)
            Estimated covariance of data.
        """
        xp, _ = get_namespace(self.components_)

        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * xp.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = exp_var - self.noise_variance_
        exp_var_diff = xp.where(
            exp_var > self.noise_variance_,
            exp_var_diff,
            xp.asarray(0.0, device=device(exp_var), dtype=exp_var.dtype),
        )
        cov = (components_.T * exp_var_diff) @ components_
        _fill_or_add_to_diagonal(cov, self.noise_variance_, xp)
        return cov

    def get_precision(self):
        """Compute data precision matrix with the generative model.

        Equals the inverse of the covariance but computed with
        the matrix inversion lemma for efficiency.

        Returns
        -------
        precision : array, shape=(n_features, n_features)
            Estimated precision of data.
        """
        xp, is_array_api_compliant = get_namespace(self.components_)

        n_features = self.components_.shape[1]

        # handle corner cases first
        if self.n_components_ == 0:
            return xp.eye(n_features) / self.noise_variance_

        if is_array_api_compliant:
            linalg_inv = xp.linalg.inv
        else:
            linalg_inv = linalg.inv

        if self.noise_variance_ == 0.0:
            return linalg_inv(self.get_covariance())

        # Get precision using the matrix inversion lemma
        components_ = self.components_
        exp_var = self.explained_variance_
        if self.whiten:
            components_ = components_ * xp.sqrt(exp_var[:, np.newaxis])
        exp_var_diff = exp_var - self.noise_variance_
        exp_var_diff = xp.where(
            exp_var > self.noise_variance_,
            exp_var_diff,
            xp.asarray(0.0, device=device(exp_var)),
        )
        precision = components_ @ components_.T / self.noise_variance_
        _fill_or_add_to_diagonal(precision, 1.0 / exp_var_diff, xp)
        precision = components_.T @ linalg_inv(precision) @ components_
        precision /= -(self.noise_variance_**2)
        _fill_or_add_to_diagonal(precision, 1.0 / self.noise_variance_, xp)
        return precision

    @abstractmethod
    def fit(self, X, y=None):
        """Placeholder for fit. Subclasses should implement this method!

        Fit the model with X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training data, where `n_samples` is the number of samples and
            `n_features` is the number of features.

        Returns
        -------
        self : object
            Returns the instance itself.
        """

    def transform(self, X):
        """Apply dimensionality reduction to X.

        X is projected on the first principal components previously extracted
        from a training set.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            New data, where `n_samples` is the number of samples
            and `n_features` is the number of features.

        Returns
        -------
        X_new : array-like of shape (n_samples, n_components)
            Projection of X in the first principal components, where `n_samples`
            is the number of samples and `n_components` is the number of components.
        """
        xp, _ = get_namespace(X, self.components_, self.explained_variance_)

        check_is_fitted(self)

        X = validate_data(
            self,
            X,
            dtype=[xp.float64, xp.float32],
            accept_sparse=("csr", "csc"),
            reset=False,
        )
        return self._transform(X, xp=xp, x_is_centered=False)

    def _transform(self, X, xp, x_is_centered=False):
        X_transformed = X @ self.components_.T
        if not x_is_centered:
            # Apply the centering after the projection so that dense X is not
            # copied or mutated and sparse X keeps its sparsity.
            X_transformed -= xp.reshape(self.mean_, (1, -1)) @ self.components_.T
        if self.whiten:
            # Components with a variance arbitrarily close to zero would lead
            # to non-finite results when whitening; clip the scale from below.
            scale = xp.sqrt(self.explained_variance_)
            min_scale = xp.finfo(scale.dtype).eps
            scale[scale < min_scale] = min_scale
            X_transformed /= scale
        return X_transformed

    def inverse_transform(self, X):
        """Transform data back to its original space.

        In other words, return an input `X_original` whose transform would be X.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_components)
            New data, where `n_samples` is the number of samples
            and `n_components` is the number of components.

        Returns
        -------
        X_original : array-like of shape (n_samples, n_features)
            Original data, where `n_samples` is the number of samples
            and `n_features` is the number of features.

        Notes
        -----
        If whitening is enabled, inverse_transform will compute the
        exact inverse operation, which includes reversing whitening.
        """
        xp, _ = get_namespace(X)

        if self.whiten:
            scaled_components = (
                xp.sqrt(self.explained_variance_[:, np.newaxis]) * self.components_
            )
            return X @ scaled_components + self.mean_
        else:
            return X @ self.components_ + self.mean_

    @property
    def _n_features_out(self):
        """Number of transformed output features."""
        return self.components_.shape[0]