"""线性回归、逻辑回归的成本函数和梯度下降实现
(Cost functions and gradient descent for linear and logistic regression.)"""

import numpy as np

def compute_cost_linear(X, y, w, b, lambda_):
    """Regularized mean-squared-error cost for linear regression.

    Args:
        X: (m, n) feature matrix.
        y: (m,) target vector.
        w: (n,) weight vector.
        b: scalar bias.
        lambda_: L2 regularization strength (bias is not regularized).

    Returns:
        Scalar cost: (1/2m)·Σ(f(x)-y)² + (λ/2m)·Σw².
    """
    m = X.shape[0]  # number of training examples
    predictions = np.dot(X, w) + b
    residuals = predictions - y
    data_loss = np.sum(residuals ** 2) / (2 * m)
    reg_loss = (lambda_ / (2 * m)) * np.sum(w ** 2)
    return data_loss + reg_loss

def compute_gradient_linear(X, y, w, b, lambda_):
    """Gradient of the regularized linear-regression cost.

    Args:
        X: (m, n) feature matrix.
        y: (m,) target vector.
        w: (n,) weight vector.
        b: scalar bias.
        lambda_: L2 regularization strength (applied to w only).

    Returns:
        Tuple (dj_dw, dj_db): gradient w.r.t. weights (n,) and bias (scalar).
    """
    m = X.shape[0]  # number of training examples
    residuals = (np.dot(X, w) + b) - y
    grad_w = np.dot(X.T, residuals) / m + (lambda_ / m) * w
    grad_b = residuals.mean()  # same as sum(residuals)/m
    return grad_w, grad_b

def sigmoid(z):
    """Logistic function 1 / (1 + e^(-z)); works element-wise on arrays."""
    return 1.0 / (1.0 + np.exp(-z))

def compute_cost_logistic(X, y, w, b, lambda_):
    """Regularized binary cross-entropy cost for logistic regression.

    Args:
        X: (m, n) feature matrix.
        y: (m,) labels in {0, 1}.
        w: (n,) weight vector.
        b: scalar bias.
        lambda_: L2 regularization strength (bias is not regularized).

    Returns:
        Scalar cost: -(1/m)·Σ[y·log(f) + (1-y)·log(1-f)] + (λ/2m)·Σw².
    """
    m = X.shape[0]  # fix: m was previously undefined here (NameError on call)
    z = np.dot(X, w) + b
    # Sigmoid inlined (same formula as the module's sigmoid helper).
    fx = 1.0 / (1.0 + np.exp(-z))
    cost = (-1 / m) * np.sum(y * np.log(fx) + (1 - y) * np.log(1 - fx)) \
        + (lambda_ / (2 * m)) * np.sum(w ** 2)
    return cost

def compute_gradient_logistic(X, y, w, b, lambda_):
    """Gradient of the regularized logistic-regression cost.

    Args:
        X: (m, n) feature matrix.
        y: (m,) labels in {0, 1}.
        w: (n,) weight vector.
        b: scalar bias.
        lambda_: L2 regularization strength (applied to w only).

    Returns:
        Tuple (dj_dw, dj_db): gradient w.r.t. weights (n,) and bias (scalar).
    """
    m = X.shape[0]  # number of training examples
    # Sigmoid inlined (same formula as the module's sigmoid helper).
    probs = 1.0 / (1.0 + np.exp(-(np.dot(X, w) + b)))
    residuals = probs - y
    grad_w = np.dot(X.T, residuals) / m + (lambda_ / m) * w
    grad_b = residuals.mean()  # same as sum(residuals)/m
    return grad_w, grad_b

def predict_class_logistic(X, w, b):
    """Predict 0/1 class labels with a logistic model and a 0.5 threshold.

    Args:
        X: (m, n) feature matrix.
        w: (n,) weight vector.
        b: scalar bias.

    Returns:
        (m,) int array of predicted labels (1 where sigmoid(X·w+b) >= 0.5).
    """
    # Dead code removed: the original pre-allocated p = np.zeros(m) and
    # unpacked an unused n, then immediately overwrote p with the
    # vectorized result below.
    z = np.dot(X, w) + b
    # Sigmoid inlined (same formula as the module's sigmoid helper).
    fx = 1.0 / (1.0 + np.exp(-z))
    return (fx >= 0.5).astype(int)
    

# ©著作权归作者所有,转载或内容合作请联系作者
# 平台声明:文章内容(如有图片或视频亦包括在内)由作者上传并发布,文章内容仅代表作者本人观点,简书系信息发布平台,仅提供信息存储服务。
#
# 推荐阅读更多精彩内容