"""Linear regression implementation (线性回归实现)."""

import numpy as np


class LinerRegression():
    """Fit y = X @ beta by ordinary least squares.

    Two solvers are provided: the closed-form normal equation and
    batch gradient descent.  (NOTE: the class name keeps the original
    spelling "LinerRegression" for backward compatibility.)
    """

    def __init__(self):
        pass

    def gen_data(self):
        """Generate a synthetic regression problem.

        Returns:
            x: (50, 4) design matrix drawn from N(0, 1).
            y: (50, 1) targets built from true coefficients
               [1, 2, 3, 4] plus N(0, 1) noise.
        """
        x = np.random.normal(0, 1, 200).reshape(50, 4)
        true_beta = np.array([1.0, 2.0, 3.0, 4.0])
        noise = np.random.normal(0, 1, x.shape[0])
        y = np.matmul(x, true_beta) + noise
        return x, y.reshape(-1, 1)

    def Normal_equation(self, x, y):
        """Solve the normal equations (X^T X) beta = X^T y.

        Uses np.linalg.solve instead of forming the explicit inverse,
        which is faster and numerically more stable.

        Args:
            x: (m, n) design matrix.
            y: (m, 1) target column vector.

        Returns:
            beta: (n, 1) estimated coefficients.
        """
        beta = np.linalg.solve(np.matmul(np.transpose(x), x),
                               np.matmul(np.transpose(x), y))
        return beta

    def GradientDescent(self, x, y, learning_rate=0.001, n_iter=500):
        """Minimize 0.5 * ||X beta - y||^2 by batch gradient descent.

        Args:
            x: (m, n) design matrix.
            y: (m, 1) target column vector.
            learning_rate: step size (default 0.001, the original value).
            n_iter: number of iterations (default 500, the original value).

        Returns:
            beta: (n, 1) estimated coefficients.
        """
        m, n = x.shape
        beta = np.ones((n, 1))
        # X^T X and X^T y do not depend on beta: hoist them out of the
        # loop so each iteration is a single matrix-vector product.
        xtx = np.matmul(np.transpose(x), x)
        xty = np.matmul(np.transpose(x), y)
        for _ in range(n_iter):
            # Gradient of the squared-error loss: X^T (X beta - y).
            beta = beta - learning_rate * (np.matmul(xtx, beta) - xty)
        return beta

    def run(self):
        """Generate data, fit with gradient descent, print coefficients."""
        x, y = self.gen_data()
        # self.Normal_equation(x, y)
        print(self.GradientDescent(x, y))


if __name__ == '__main__':
    # Script entry point: build the model and run the demo.
    app = LinerRegression()
    app.run()

# ©著作权归作者所有,转载或内容合作请联系作者
# (Copyright belongs to the author; contact the author for reprint/cooperation.)
# 平台声明:文章内容(如有图片或视频亦包括在内)由作者上传并发布,文章内容仅代表作者本人观点,简书系信息发布平台,仅提供信息存储服务。
# (Platform notice: content was uploaded by the author and reflects only the author's views; Jianshu provides information storage services only.)