from __future__ import print_function, division
import numpy as np
from mlfromscratch.utils import calculate_covariance_matrix


class PCA():
    """A method for doing dimensionality reduction by transforming the feature
    space to a lower dimensionality, removing correlation between features and
    maximizing the variance along each feature axis. This class is also used
    throughout the project to plot data.
    """
    def transform(self, X, n_components):
        """ Fit the dataset to the number of principal components specified by
        n_components and return the transformed dataset """
        covariance_matrix = calculate_covariance_matrix(X)
        # Eigendecompose the covariance matrix
        # (eigenvector[:, i] corresponds to eigenvalue[i]).
        # np.linalg.eigh would also work here, since the matrix is symmetric.
        eigenvalues, eigenvectors = np.linalg.eig(covariance_matrix)
        # Sort the eigenvalues and corresponding eigenvectors from largest
        # to smallest eigenvalue and select the first n_components
        idx = eigenvalues.argsort()[::-1]
        eigenvalues = eigenvalues[idx][:n_components]
        eigenvectors = np.atleast_1d(eigenvectors[:, idx])[:, :n_components]
        # Project the data onto the selected principal components
        X_transformed = X.dot(eigenvectors)
        return X_transformed
Source: [Machine Learning From Scratch] unsupervised_learning/principal_component_analysis
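The helper calculate_covariance_matrix is not shown above; it is assumed here to return the sample covariance of the mean-centered data, (1/(n-1))(X - X̄)ᵀ(X - X̄), so the eigendecomposition is well defined. Below is a minimal usage sketch on synthetic data; the correlated third feature is purely illustrative.

# A minimal usage sketch (not part of the original file). Assumes the
# PCA class above and its calculate_covariance_matrix dependency are
# importable as shown.
import numpy as np

np.random.seed(0)
X = np.random.randn(100, 2)                       # two independent features
third = X[:, :1] + 0.1 * np.random.randn(100, 1)  # feature correlated with the first
X = np.hstack([X, third])                         # shape (100, 3)

pca = PCA()
X_transformed = pca.transform(X, n_components=2)
print(X_transformed.shape)  # (100, 2): data projected onto the top 2 components

Because the third feature is almost a copy of the first, nearly all of the variance is captured by the first two principal components, which is exactly the redundancy PCA is meant to remove.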