# grad_desc — linear regression fitted by gradient descent
import numpy as np
import matplotlib.pyplot as plt
# Load the data set: each row of data.csv is a comma-separated (x, y) pair.
points = np.genfromtxt("data.csv", delimiter=",")
# Split the two columns of points into x and y.
x = points[:, 0]
y = points[:, 1]
# Visualize the raw data as a scatter plot.
plt.scatter(x, y)
plt.show()
# The loss is a function of the coefficients w and b; the data (x, y) is passed in.
def compute_cost(w, b, points):
    """Return the mean squared error of the line y = w*x + b over *points*.

    Parameters
    ----------
    w, b : float
        Slope and intercept of the candidate line.
    points : numpy.ndarray of shape (M, 2)
        Column 0 holds the x values, column 1 the y values.

    Returns
    -------
    float
        (1/M) * sum((y_i - w*x_i - b)**2).
    """
    xs = points[:, 0]
    ys = points[:, 1]
    # Vectorized MSE: one NumPy expression replaces the per-point Python loop
    # (note: in Python 3 a single `/` is always true float division).
    return np.mean((ys - w * xs - b) ** 2)
# Hyperparameters for gradient descent.
alpha = 0.0000001  # learning rate (step size for each update)
initial_w = 0      # starting slope
initial_b = 0      # starting intercept
num_iter =20       # number of gradient-descent iterations
def grad_desc(points, initial_w, initial_b, alpha, num_iter):
    """Run *num_iter* gradient-descent steps starting from (initial_w, initial_b).

    Returns [w, b, cost_list], where cost_list holds the loss recorded at the
    start of every iteration so the descent can be visualized afterwards.
    """
    w, b = initial_w, initial_b
    # Loss history, one entry per iteration, used to display convergence.
    cost_list = []
    for _ in range(num_iter):
        cost_list.append(compute_cost(w, b, points))
        w, b = step_grad_desc(w, b, alpha, points)
    return [w, b, cost_list]
def step_grad_desc(current_w, current_b, alpha, points):
    """Perform one gradient-descent update of (w, b) on the MSE loss.

    Parameters
    ----------
    current_w, current_b : float
        Current slope and intercept.
    alpha : float
        Learning rate.
    points : numpy.ndarray of shape (M, 2)
        Column 0 holds the x values, column 1 the y values.

    Returns
    -------
    (float, float)
        The updated (w, b) pair after a single step.
    """
    xs = points[:, 0]
    ys = points[:, 1]
    M = len(points)
    # Residual of the current line at every point, computed in one vectorized
    # pass instead of a per-point Python loop.
    err = current_w * xs + current_b - ys
    # Gradients of the mean squared error with respect to w and b.
    grad_w = 2 / M * np.sum(err * xs)
    grad_b = 2 / M * np.sum(err)
    # Gradient-descent update: move against the gradient.
    updated_w = current_w - alpha * grad_w
    updated_b = current_b - alpha * grad_b
    return updated_w, updated_b
# Run gradient descent and unpack the fitted slope, intercept, and loss history.
w,b,cost_list= grad_desc(points,initial_w,initial_b,alpha,num_iter)
print ("w is :",w)
print ("b is :",b)
# Loss of the final fitted line on the full data set.
cost = compute_cost(w,b,points)
print("cost_list:",cost_list)
print("cost is:",cost)
# NOTE(review): the loss curve, the data scatter, and the fitted line are all
# drawn on the same axes (no plt.show()/plt.figure() in between) — confirm
# this overlay is intended.
plt.plot(cost_list)
plt.scatter(x,y)
# Predicted y values of the fitted line, drawn in red over the data.
pred_y= w*x+b
plt.plot(x,pred_y,c='r')