import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import SGDRegressor

X = 2 * np.random.rand(100, 1)                # 100 samples drawn uniformly from [0, 2)
y = 4 + 3 * X + np.random.randn(100, 1)       # y = 4 + 3x plus Gaussian noise
plt.scatter(X, y)
plt.show()
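Before handing the problem to SGDRegressor, a minimal batch gradient descent sketch on the same data shows the update rule the estimator applies; the learning rate eta and the epoch count below are illustrative assumptions, not values from the original code.

# Batch gradient descent by hand (eta and n_epochs are assumed, illustrative values)
eta = 0.1                                     # learning rate
n_epochs = 1000                               # full passes over the data
m = len(X)
X_b = np.c_[np.ones((m, 1)), X]               # prepend a bias column of ones
theta = np.random.randn(2, 1)                 # random initialization of [w0, w1]
for _ in range(n_epochs):
    gradients = 2 / m * X_b.T @ (X_b @ theta - y)   # gradient of the MSE
    theta -= eta * gradients
print(theta.ravel())                          # should end up close to [4, 3]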
# Gradient descent regression
sgd_reg = SGDRegressor(max_iter=100)          # maximum number of passes over the training data
sgd_reg.fit(X, y.ravel())                     # ravel() flattens y to the 1-D shape scikit-learn expects
print(sgd_reg.predict([[1.5]]))               # predict expects a 2-D array: one row per sample
print("w0= ", sgd_reg.intercept_)
print("w1=", sgd_reg.coef_)
>>>
[8.52039601]
w0= [3.99348169]
w1= [3.01794288]
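As a quick sanity check on these estimates (the data was generated with intercept 4 and slope 3), the exact least-squares solution can be computed from the normal equation. A small sketch, reusing the X and y defined above:

# Closed-form least squares: theta = (X_b^T X_b)^(-1) X_b^T y
X_b = np.c_[np.ones((100, 1)), X]             # prepend a bias column of ones
theta_best = np.linalg.inv(X_b.T @ X_b) @ X_b.T @ y
print("closed-form w0, w1:", theta_best.ravel())   # should agree with the SGD fit up to noise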
Now plot the fitted line:
line_x = np.linspace(0, 2, 100)
# intercept_ and coef_ are 1-element arrays, so broadcasting makes line_y a (100,) array
line_y = sgd_reg.intercept_ + sgd_reg.coef_ * line_x
plt.scatter(X, y)
plt.plot(line_x, line_y, 'r--')
plt.show()
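Equivalently, the fitted model's own predict() can produce the line, which avoids computing intercept_ + coef_ * x by hand. A minimal sketch, assuming the sgd_reg fitted above:

line_y = sgd_reg.predict(line_x.reshape(-1, 1))    # predict needs a 2-D (n_samples, 1) input
plt.plot(line_x, line_y, 'r--')
plt.show()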