The following 5 code examples, extracted from open-source Python projects, illustrate how to use sklearn.decomposition.FactorAnalysis().
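Before the project examples, here is a minimal sketch of the basic FactorAnalysis API on synthetic data; the data and the n_components value are illustrative assumptions, not taken from any of the projects below.

import numpy as np
from sklearn.decomposition import FactorAnalysis

# Synthetic data: 100 samples, 6 observed features (illustrative only)
rng = np.random.RandomState(0)
X = rng.randn(100, 6)

fa = FactorAnalysis(n_components=2)   # 2 latent factors (assumed value)
Z = fa.fit_transform(X)               # latent factor scores, shape (100, 2)

print(fa.components_.shape)           # (2, 6): factor loadings
print(fa.noise_variance_.shape)       # (6,): per-feature noise variance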
def test_factor_component_analyzer(self):
    self.standard_check(FactorAnalysis)
def fit(self, X, feature_labels=None, estimator_params=None):
    """Fit a scikit-learn FactorAnalysis model to X.

    Parameters
    ----------
    X : array-like, shape (n_samples, n_features)
        Training data.

    feature_labels : array-like, shape (n_features,), optional
        Labels for each of the features in X.

    estimator_params : dict, optional
        The parameters to pass to the scikit-learn FactorAnalysis estimator.

    Returns
    -------
    self
    """
    self._reset()

    if feature_labels is None:
        feature_labels = ["feature_{}".format(i) for i in range(X.shape[1])]
    self.feature_labels_ = feature_labels

    self.model_ = SklearnFactorAnalysis()
    if estimator_params is not None:
        # Update scikit-learn estimator params
        assert isinstance(estimator_params, dict)
        self.model_.set_params(**estimator_params)
    self.model_.fit(X)

    # Remove zero-valued components (components_ is n_components x n_features)
    components_mask = np.sum(self.model_.components_ != 0.0, axis=1) > 0
    self.components_ = self.model_.components_[components_mask]

    # Compute the % variance explained (with/without noise)
    c2 = np.sum(self.components_ ** 2, axis=1)
    self.total_variance_ = np.sum(c2)
    self.pvars_ = 100 * c2 / self.total_variance_
    self.pvars_noise_ = 100 * c2 / (
        self.total_variance_ + np.sum(self.model_.noise_variance_))

    return self
def FA_results(data, n_comps=None):
    fa = FA(n_components=n_comps)
    model = fa.fit(data)
    # FactorAnalysis does not expose reconstruction_err_ (that attribute
    # belongs to NMF), so report the mean log-likelihood from score() instead.
    out_data = {'model': model,
                'log-likelihood': fa.score(data)}
    return 'FA', out_data
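Assuming FA is sklearn.decomposition.FactorAnalysis imported under that alias (the import, the data, and the component count below are assumptions for illustration), FA_results can be called like this:

import numpy as np
from sklearn.decomposition import FactorAnalysis as FA

data = np.random.RandomState(0).randn(150, 10)   # synthetic data
name, results = FA_results(data, n_comps=3)
print(name, results['log-likelihood'])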
def compute_scores(X):
    pca = PCA(svd_solver='full')
    fa = FactorAnalysis()

    pca_scores, fa_scores = [], []
    for n in n_components:
        pca.n_components = n
        fa.n_components = n
        pca_scores.append(np.mean(cross_val_score(pca, X)))
        fa_scores.append(np.mean(cross_val_score(fa, X)))

    return pca_scores, fa_scores
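compute_scores reads n_components from the enclosing scope, so a grid of candidate component counts has to be defined before it is called. A minimal sketch of such a call on synthetic data; the grid values and data shape are illustrative assumptions:

import numpy as np
from sklearn.decomposition import PCA, FactorAnalysis
from sklearn.model_selection import cross_val_score

X = np.random.RandomState(0).randn(300, 20)   # synthetic data
n_components = np.arange(1, 21, 5)            # candidate counts (illustrative)

pca_scores, fa_scores = compute_scores(X)
print("FA cross-validated log-likelihoods:", fa_scores)
print("best n_components for FA:", n_components[np.argmax(fa_scores)])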
def __init__(self, n_iter=50, rank=None,
             auto_nuisance=True, n_nureg=None, nureg_zscore=True,
             nureg_method='PCA', baseline_single=False, logS_range=1.0,
             SNR_prior='exp', SNR_bins=21, rho_bins=20, tol=1e-4,
             optimizer='BFGS',
             minimize_options={'gtol': 1e-4, 'disp': False, 'maxiter': 20},
             random_state=None, anneal_speed=10):

    self.n_iter = n_iter
    self.rank = rank
    self.auto_nuisance = auto_nuisance
    self.n_nureg = n_nureg
    self.nureg_zscore = nureg_zscore
    if auto_nuisance:
        assert (n_nureg is None) \
            or (isinstance(n_nureg, int) and n_nureg > 0), \
            'n_nureg should be a positive integer or None' \
            ' if auto_nuisance is True.'
    if self.nureg_zscore:
        self.preprocess_residual = lambda x: _zscore(x)
    else:
        self.preprocess_residual = lambda x: x
    if nureg_method == 'FA':
        self.nureg_method = lambda x: FactorAnalysis(n_components=x)
    elif nureg_method == 'PCA':
        self.nureg_method = lambda x: PCA(n_components=x, whiten=True)
    elif nureg_method == 'SPCA':
        self.nureg_method = lambda x: SparsePCA(n_components=x,
                                                max_iter=20, tol=tol)
    elif nureg_method == 'ICA':
        self.nureg_method = lambda x: FastICA(n_components=x, whiten=True)
    else:
        raise ValueError('nureg_method can only be FA, PCA, '
                         'SPCA (for sparse PCA) or ICA')
    self.baseline_single = baseline_single
    if type(logS_range) is int:
        logS_range = float(logS_range)
    self.logS_range = logS_range
    assert SNR_prior in ['unif', 'lognorm', 'exp'], \
        "SNR_prior can only be chosen from 'unif', 'lognorm' and 'exp'"
    self.SNR_prior = SNR_prior
    self.SNR_bins = SNR_bins
    self.rho_bins = rho_bins
    self.tol = tol
    self.optimizer = optimizer
    self.minimize_options = minimize_options
    self.random_state = random_state
    self.anneal_speed = anneal_speed
    return
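When nureg_method='FA', this constructor stores a factory that builds a FactorAnalysis with a given number of components. Below is a sketch of what that factory produces and how such an estimator could be applied to a residual matrix; the call pattern, component count, and data are assumptions based on this constructor alone, not taken from the rest of the class:

import numpy as np
from sklearn.decomposition import FactorAnalysis

# Factory equivalent to self.nureg_method when nureg_method == 'FA'
nureg_method = lambda x: FactorAnalysis(n_components=x)

# Synthetic residual matrix: 200 time points x 50 voxels (illustrative only)
residuals = np.random.RandomState(0).randn(200, 50)

fa = nureg_method(5).fit(residuals)            # 5 nuisance components (assumed)
nuisance_regressors = fa.transform(residuals)  # shape (200, 5)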