Machine learning: gradient descent in Python


----- data.csv -----

52.211796692214001,79.64197304980874
39.299566694317065,59.171489321869508
48.10504169176825,75.331242297063056
52.550014442733818,71.300879886850353
45.419730144973755,55.165677145959123
54.351634881228918,82.478846757497919
44.164049496773352,62.008923245725825
58.16847071685779,75.392870425994957
56.727208057096611,81.43619215887864
48.955888566093719,60.723602440673965
44.687196231480904,82.892503731453715
60.297326851333466,97.379896862166078
45.618643772955828,48.847153317355072
38.816817537445637,56.877213186268506
66.189816606752601,83.878564664602763
65.41605174513407,118.59121730252249
47.48120860786787,57.251819462268969
41.57564261748702,51.391744079832307
51.84518690563943,75.380651665312357
59.370822011089523,74.765564032151374
57.31000343834809,95.455052922574737


-------------------------------------
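
The script below fits a line y = mx + b to this data by batch gradient descent on the mean squared error. For reference, the cost being minimized and its partial derivatives (which account for the -(2/N) factors in step_gradient below) are:

    E(b, m) = \frac{1}{N} \sum_{i=1}^{N} \left( y_i - (m x_i + b) \right)^2

    \frac{\partial E}{\partial b} = -\frac{2}{N} \sum_{i=1}^{N} \left( y_i - (m x_i + b) \right)

    \frac{\partial E}{\partial m} = -\frac{2}{N} \sum_{i=1}^{N} x_i \left( y_i - (m x_i + b) \right)

Each step then moves both parameters against the gradient: b becomes b - learning_rate * dE/db, and m becomes m - learning_rate * dE/dm.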

from numpy import array, genfromtxt

# Fit y = m*x + b, where m is the slope and b is the y-intercept.

def compute_error_for_line_given_points(b, m, points):
    # Mean squared error of the line y = m*x + b over all points.
    totalError = 0
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        totalError += (y - (m * x + b)) ** 2
    return totalError / float(len(points))

def step_gradient(b_current, m_current, points, learningRate):
    # One batch gradient-descent step: accumulate the partial derivatives
    # of the mean squared error with respect to b and m over all points,
    # then move both parameters a small step against the gradient.
    b_gradient = 0
    m_gradient = 0
    print("---step_gradient: b_current={0} m_current={1}----".format(b_current, m_current))
    N = float(len(points))
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        b_gradient += -(2 / N) * (y - ((m_current * x) + b_current))
        m_gradient += -(2 / N) * x * (y - ((m_current * x) + b_current))
        print("x={0},y={1},b_gradient={2},m_gradient={3}".format(x, y, b_gradient, m_gradient))
    new_b = b_current - (learningRate * b_gradient)
    new_m = m_current - (learningRate * m_gradient)
    print("-------------new_b={0} = {1} - ({2} * {3})".format(new_b, b_current, learningRate, b_gradient))
    print("-------------new_m={0} = {1} - ({2} * {3})".format(new_m, m_current, learningRate, m_gradient))
    return [new_b, new_m]

def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    # Repeatedly apply step_gradient, starting from the initial guesses.
    b = starting_b
    m = starting_m
    for i in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
    return [b, m]

def run():
    points = genfromtxt("data.csv", delimiter=",")
    learning_rate = 0.0005
    initial_b = 0  # initial y-intercept guess
    initial_m = 0  # initial slope guess
    num_iterations = 200
    print("len(points)={0}".format(len(points)))
    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(
        initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points)))
    print("Running...")
    [b, m] = gradient_descent_runner(points, initial_b, initial_m, learning_rate, num_iterations)
    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(
        num_iterations, b, m, compute_error_for_line_given_points(b, m, points)))

if __name__ == '__main__':
    run()
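
The per-point loop in step_gradient is easy to trace thanks to the debug prints, but the same update can be computed with whole-array NumPy operations. A minimal vectorized sketch, assuming points keeps the same two-column layout as data.csv (step_gradient_vectorized is a hypothetical name, not part of the original script):

import numpy as np

def step_gradient_vectorized(b_current, m_current, points, learning_rate):
    # Same batch update as step_gradient, computed over whole columns at once.
    x = points[:, 0]
    y = points[:, 1]
    residual = y - (m_current * x + b_current)   # per-point error
    b_gradient = -2.0 * residual.mean()          # dE/db, i.e. -(2/N) * sum(residual)
    m_gradient = -2.0 * (x * residual).mean()    # dE/dm
    return [b_current - learning_rate * b_gradient,
            m_current - learning_rate * m_gradient]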


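As a sanity check, the gradient-descent result can be compared against NumPy's closed-form least-squares fit; with a small learning rate and enough iterations the two should agree closely. A sketch, assuming data.csv sits in the working directory:

import numpy as np

points = np.genfromtxt("data.csv", delimiter=",")
m_exact, b_exact = np.polyfit(points[:, 0], points[:, 1], 1)  # degree-1 least-squares fit
print("closed-form fit: m = {0}, b = {1}".format(m_exact, b_exact))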