
    -ix:                         S r SSKrSSKJrJr  SSKrSSKJr	  SSK
Jr  SSKJrJrJrJr  SSKJrJr  SSKJr  SS	KJr  SS
KJrJr   " S S\\\5      rg)zRestricted Boltzmann Machine    N)IntegralReal)expit   )BaseEstimatorClassNamePrefixFeaturesOutMixinTransformerMixin_fit_context)check_random_stategen_even_slices)Interval)safe_sparse_dot)check_is_fittedvalidate_datac            
         ^  \ rS rSr% Sr\" \SSSS9/\" \SSSS9/\" \SSSS9/\" \SSSS9/S	/S
/S.r\	\
S'    SSSSSSS.S jjrS rS rS rS rS rS r\" SS9S S j5       rS rS r\" SS9S S j5       rU 4S jrSrU =r$ )!BernoulliRBM   a  Bernoulli Restricted Boltzmann Machine (RBM).

A Restricted Boltzmann Machine with binary visible units and
binary hidden units. Parameters are estimated using Stochastic Maximum
Likelihood (SML), also known as Persistent Contrastive Divergence (PCD)
[2].

The time complexity of this implementation is ``O(d ** 2)`` assuming
d ~ n_features ~ n_components.

Read more in the :ref:`User Guide <rbm>`.

Parameters
----------
n_components : int, default=256
    Number of binary hidden units.

learning_rate : float, default=0.1
    The learning rate for weight updates. It is *highly* recommended
    to tune this hyper-parameter. Reasonable values are in the
    10**[0., -3.] range.

batch_size : int, default=10
    Number of examples per minibatch.

n_iter : int, default=10
    Number of iterations/sweeps over the training dataset to perform
    during training.

verbose : int, default=0
    The verbosity level. The default, zero, means silent mode. Range
    of values is [0, inf].

random_state : int, RandomState instance or None, default=None
    Determines random number generation for:

    - Gibbs sampling from visible and hidden layers.

    - Initializing components, sampling from layers during fit.

    - Corrupting the data when scoring samples.

    Pass an int for reproducible results across multiple function calls.
    See :term:`Glossary <random_state>`.

Attributes
----------
intercept_hidden_ : array-like of shape (n_components,)
    Biases of the hidden units.

intercept_visible_ : array-like of shape (n_features,)
    Biases of the visible units.

components_ : array-like of shape (n_components, n_features)
    Weight matrix, where `n_features` is the number of
    visible units and `n_components` is the number of hidden units.

h_samples_ : array-like of shape (batch_size, n_components)
    Hidden Activation sampled from the model distribution,
    where `batch_size` is the number of examples per minibatch and
    `n_components` is the number of hidden units.

n_features_in_ : int
    Number of features seen during :term:`fit`.

    .. versionadded:: 0.24

feature_names_in_ : ndarray of shape (`n_features_in_`,)
    Names of features seen during :term:`fit`. Defined only when `X`
    has feature names that are all strings.

    .. versionadded:: 1.0

See Also
--------
sklearn.neural_network.MLPRegressor : Multi-layer Perceptron regressor.
sklearn.neural_network.MLPClassifier : Multi-layer Perceptron classifier.
sklearn.decomposition.PCA : An unsupervised linear dimensionality
    reduction model.

References
----------

[1] Hinton, G. E., Osindero, S. and Teh, Y. A fast learning algorithm for
    deep belief nets. Neural Computation 18, pp 1527-1554.
    https://www.cs.toronto.edu/~hinton/absps/fastnc.pdf

[2] Tieleman, T. Training Restricted Boltzmann Machines using
    Approximations to the Likelihood Gradient. International Conference
    on Machine Learning (ICML) 2008

Examples
--------

>>> import numpy as np
>>> from sklearn.neural_network import BernoulliRBM
>>> X = np.array([[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
>>> model = BernoulliRBM(n_components=2)
>>> model.fit(X)
BernoulliRBM(n_components=2)

For a more detailed example usage, see
:ref:`sphx_glr_auto_examples_neural_networks_plot_rbm_logistic_classification.py`.
   Nleft)closedr   neitherverboserandom_staten_componentslearning_rate
batch_sizen_iterr   r   _parameter_constraintsg?
   )r   r   r   r   r   c                L    Xl         X l        X0l        X@l        XPl        X`l        g Nr   )selfr   r   r   r   r   r   s          N/var/www/html/venv/lib/python3.13/site-packages/sklearn/neural_network/_rbm.py__init__BernoulliRBM.__init__   s%     )*$(    c                     [        U 5        [        XSS[        R                  [        R                  4S9nU R                  U5      $ )a  Compute the hidden layer activation probabilities, P(h=1|v=X).

Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
    The data to be transformed.

Returns
-------
h : ndarray of shape (n_samples, n_components)
    Latent representations of the data.
csrF)accept_sparseresetdtype)r   r   npfloat64float32_mean_hiddens)r#   Xs     r$   	transformBernoulliRBM.transform   s?     	5bjj"**=U
 !!!$$r'   c                 n    [        XR                  R                  5      nX R                  -  n[	        X"S9$ )a  Computes the probabilities P(h=1|v).

Parameters
----------
v : ndarray of shape (n_samples, n_features)
    Values of the visible layer.

Returns
-------
h : ndarray of shape (n_samples, n_components)
    Corresponding mean field values for the hidden layer.
out)r   components_Tintercept_hidden_r   )r#   vps      r$   r0   BernoulliRBM._mean_hiddens   s2     A//112	###Qr'   c                 \    U R                  U5      nUR                  UR                  S9U:  $ )a9  Sample from the distribution P(h|v).

Parameters
----------
v : ndarray of shape (n_samples, n_features)
    Values of the visible layer to sample from.

rng : RandomState instance
    Random number generator to use.

Returns
-------
h : ndarray of shape (n_samples, n_components)
    Values of the hidden layer.
size)r0   uniformshape)r#   r:   rngr;   s       r$   _sample_hiddensBernoulliRBM._sample_hiddens   s.      q!{{{(1,,r'   c                     [         R                  " XR                  5      nX0R                  -  n[	        X3S9  UR                  UR                  S9U:  $ )a9  Sample from the distribution P(v|h).

Parameters
----------
h : ndarray of shape (n_samples, n_components)
    Values of the hidden layer to sample from.

rng : RandomState instance
    Random number generator to use.

Returns
-------
v : ndarray of shape (n_samples, n_features)
    Values of the visible layer.
r5   r>   )r-   dotr7   intercept_visible_r   r@   rA   )r#   hrB   r;   s       r$   _sample_visiblesBernoulliRBM._sample_visibles   sG      FF1&&'	$$$a{{{(1,,r'   c                     [        XR                  5      * [        R                  " S[        XR                  R
                  5      U R                  -   5      R                  SS9-
  $ )zComputes the free energy F(v) = - log sum_h exp(-E(v,h)).

Parameters
----------
v : ndarray of shape (n_samples, n_features)
    Values of the visible layer.

Returns
-------
free_energy : ndarray of shape (n_samples,)
    The value of the free energy.
r   r   axis)r   rG   r-   	logaddexpr7   r8   r9   sum)r#   r:   s     r$   _free_energyBernoulliRBM._free_energy   sV      #:#:;;bllq"2"2"4"458N8NN?

#1#+ 	r'   c                     [        U 5        [        U S5      (       d  [        U R                  5      U l        U R                  XR                  5      nU R                  X R                  5      nU$ )a  Perform one Gibbs sampling step.

Parameters
----------
v : ndarray of shape (n_samples, n_features)
    Values of the visible layer to start from.

Returns
-------
v_new : ndarray of shape (n_samples, n_features)
    Values of the visible layer after one Gibbs step.
random_state_)r   hasattrr   r   rS   rC   rI   )r#   r:   h_v_s       r$   gibbsBernoulliRBM.gibbs   s]     	t_--!3D4E4E!FD!!!%7%78""2'9'9:	r'   T)prefer_skip_nested_validationc           	      B   [        U S5      (       + n[        XS[        R                  US9n[        U S5      (       d  [	        U R
                  5      U l        [        U S5      (       dl  [        R                  " U R                  R                  SSU R                  UR                  S   45      SS	9U l        U R                  R                  S   U l        [        U S
5      (       d%  [        R                  " U R                  5      U l        [        U S5      (       d(  [        R                  " UR                  S   5      U l        [        U S5      (       d1  [        R                  " U R                   U R                  45      U l        U R%                  XR                  5        g)aO  Fit the model to the partial segment of the data X.

Parameters
----------
X : ndarray of shape (n_samples, n_features)
    Training data.

y : array-like of shape (n_samples,) or (n_samples, n_outputs), default=None
    Target values (None for unsupervised transformations).

Returns
-------
self : BernoulliRBM
    The fitted model.
r7   r)   )r*   r,   r+   rS   r   {Gz?r   F)orderr9   rG   
h_samples_N)rT   r   r-   r.   r   r   rS   asarraynormalr   rA   r7   _n_features_outzerosr9   rG   r   r^   _fit)r#   r1   y
first_passs       r$   partial_fitBernoulliRBM.partial_fit  sG   " !}55
5

*
 t_--!3D4E4E!FDt]++!zz""))!TD4E4Eqwwqz3RS D $(#3#3#9#9!#<D t011%'XX!!&D" t122&(hh
'D# t\** hh9J9J'KLDO		!''(r'   c           	         U R                  U5      nU R                  U R                  U5      nU R                  U5      n[        U R                  5      UR
                  S   -  n[        UR                  USS9R                  nU[        R                  " UR                  U5      -  nU =R                  Xg-  -  sl
        U =R                  XcR                  SS9UR                  SS9-
  -  -  sl        U =R                  U[        R                  " UR                  SS95      R                  5       UR                  SS9-
  -  -  sl        SXRR!                  UR
                  S9U:  '   [        R"                  " XU5      U l        g)a7  Inner fit for one mini-batch.

Adjust the parameters to maximize the likelihood of v using
Stochastic Maximum Likelihood (SML).

Parameters
----------
v_pos : ndarray of shape (n_samples, n_features)
    The data to use for training.

rng : RandomState instance
    Random number generator to use for sampling.
r   T)dense_outputrL   g      ?r>   N)r0   rI   r^   floatr   rA   r   r8   r-   rF   r7   r9   rO   rG   r_   squeezer@   floor)r#   v_posrB   h_posv_negh_neglrupdates           r$   rc   BernoulliRBM._fit:  s@    ""5)%%doos;""5)4%%&Q7 %dCEE"&&%((BK'"		q	(9EII1I<M(M"NN2JJuyyay()113eiiQi6GG$
 	
 8;kku{{k+e34((50r'   c                 ,   [        U 5        [        XSSS9n[        U R                  5      n[        R
                  " UR                  S   5      UR                  SUR                  S   UR                  S   5      4n[        R                  " U5      (       a  SX$   -  S-   n[        U[        R                  5      (       a=  U[        R                  " UR                  R                  5       U4UR                  S9-   nOLU[        R                  " UR                  5       U4UR                  S9-   nOUR!                  5       nSXd   -
  Xd'   U R#                  U5      nU R#                  U5      nUR                  S   * [        R$                  " SX-
  * 5      -  $ )a  Compute the pseudo-likelihood of X.

Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
    Values of the visible layer. Must be all-boolean (not checked).

Returns
-------
pseudo_likelihood : ndarray of shape (n_samples,)
    Value of the pseudo-likelihood (proxy for likelihood).

Notes
-----
This method is not deterministic: it computes a quantity called the
free energy on X, then on a randomly corrupted version of X, and
returns the log of the logistic function of the difference.
r)   F)r*   r+   r   r   )rA   )r   r   r   r   r-   arangerA   randintspissparse
isinstancematrix
csr_matrixAravel	csr_arraycopyrP   rN   )	r#   r1   r:   rB   inddatarV   fefe_s	            r$   score_samplesBernoulliRBM.score_samplesX  s9   & 	$eD !2!23 yy$ckk!QWWQZ&LM;;q>>;?D$		**'<AGGLLtzz|S&9IIB"'kBGq!#
{R\\!sx[999r'   c           	         [        XS[        R                  [        R                  4S9nUR                  S   n[        U R                  5      n[        R                  " UR                  SSU R                  UR                  S   45      SUR                  S9U l        U R                  R                  S   U l        [        R                  " U R                  UR                  S9U l        [        R                  " UR                  S   UR                  S9U l        [        R                  " U R                   U R                  4UR                  S9U l        [%        [        R&                  " [)        U5      U R                   -  5      5      n[+        [-        XPR                   -  XSS	95      nU R.                  n[0        R0                  " 5       n[3        SU R4                  S-   5       H  n	U H  n
U R7                  X   U5        M     U(       d  M)  [0        R0                  " 5       n[9        S
[;        U 5      R<                  U	U R?                  U5      RA                  5       X-
  4-  5        UnM     U $ )aL  Fit the model to the data X.

Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
    Training data.

y : array-like of shape (n_samples,) or (n_samples, n_outputs), default=None
    Target values (None for unsupervised transformations).

Returns
-------
self : BernoulliRBM
    The fitted model.
r)   )r*   r,   r   r[   r   r\   )r]   r,   )r,   )	n_samplesz9[%s] Iteration %d, pseudo-likelihood = %.2f, time = %.2fs)!r   r-   r.   r/   rA   r   r   r_   r`   r   r,   r7   ra   rb   r9   rG   r   r^   intceilrj   listr   r   timeranger   rc   printtype__name__r   mean)r#   r1   rd   r   rB   	n_batchesbatch_slicesr   begin	iterationbatch_sliceends               r$   fitBernoulliRBM.fit  s   " $rzz2::>VWGGAJ	 !2!23::JJq$!2!2AGGAJ ?@''

  $//55a8!#$*;*;177!K"$((1771:QWW"E((DOOT5F5F#GqwwWi 04?? BCD	I7X
 ,,		q$++/2I+		!.#.  , wiikOT
++!**1-224	  3" r'   c                 l   > [         TU ]  5       nSUR                  l        SS/UR                  l        U$ )NTr.   r/   )super__sklearn_tags__
input_tagssparsetransformer_tagspreserves_dtype)r#   tags	__class__s     r$   r   BernoulliRBM.__sklearn_tags__  s4    w')!%1:I0F-r'   )ra   r   r7   r^   r9   rG   r   r   r   r   rS   r   )   r"   )r   
__module____qualname____firstlineno____doc__r   r   r   r   dict__annotations__r%   r2   r0   rC   rI   rP   rW   r
   rf   rc   r   r   r   __static_attributes____classcell__)r   s   @r$   r   r      s    gT "(AtFCD"4DCD!T&ABHaf=>;'($D  ) )"%("-&-*"* 5') 6')R1<':R 55 65n r'   r   )r   r   numbersr   r   numpyr-   scipy.sparser   rx   scipy.specialr   baser   r   r	   r
   utilsr   r   utils._param_validationr   utils.extmathr   utils.validationr   r   r    r'   r$   <module>r      sE    "
  "     8 . + =d24Dm dr'   