# [web-page residue, not program code] "Code pull complete; the page will refresh automatically."
import numpy as np
import matplotlib.pyplot as plt
def fun(x, w, b):
    """Linear model: return the prediction w * x + b for input x."""
    return b + w * x
def loss(y, x, w, b):
    """Squared error between the target y and the line's prediction at x."""
    residual = y - fun(x, w, b)
    return residual ** 2
def compute_error(b, w, points):
    """Mean squared error of the line y = w*x + b over an (n, 2) point array.

    Parameters
    ----------
    b, w : float
        Intercept and slope of the candidate line.
    points : array-like of shape (n, 2)
        Column 0 holds the x values, column 1 the y values.

    Returns
    -------
    float
        Average of (y - (w*x + b))**2 over all n points.
    """
    pts = np.asarray(points, dtype=float)
    x = pts[:, 0]
    y = pts[:, 1]
    # Vectorized MSE: a single NumPy pass replaces the per-row Python loop.
    residuals = y - (w * x + b)
    return float(np.mean(residuals ** 2))
def gradient(b_current, w_current, points, learning_rate):
    """Perform one batch gradient-descent step on the MSE loss of y = w*x + b.

    Parameters
    ----------
    b_current, w_current : float
        Current intercept and slope.
    points : array-like of shape (n, 2)
        Column 0 holds the x values, column 1 the y values.
    learning_rate : float
        Step size applied to each gradient component.

    Returns
    -------
    list
        [new_b, new_w] after a single update step.
    """
    pts = np.asarray(points, dtype=float)
    x = pts[:, 0]
    y = pts[:, 1]
    n = float(len(pts))
    # Shared residual term (w*x + b - y); vectorized instead of a per-row loop.
    err = (w_current * x + b_current) - y
    # d/db mean((w*x + b - y)^2) = (2/n) * sum(w*x + b - y)
    b_gradient = (2.0 / n) * np.sum(err)
    # d/dw mean((w*x + b - y)^2) = (2/n) * sum(x * (w*x + b - y))
    w_gradient = (2.0 / n) * np.sum(x * err)
    new_b = b_current - learning_rate * b_gradient
    new_w = w_current - learning_rate * w_gradient
    return [new_b, new_w]
def lr(points, starting_b, starting_w, learning_rate, num_iterations):
    """Fit y = w*x + b by running num_iterations gradient-descent steps.

    Parameters
    ----------
    points : array-like of shape (n, 2)
        Column 0 holds the x values, column 1 the y values.
    starting_b, starting_w : float
        Initial intercept and slope.
    learning_rate : float
        Step size for each update.
    num_iterations : int
        Number of descent steps; progress is printed every step.

    Returns
    -------
    list
        [b, w], the fitted intercept and slope.
    """
    b = starting_b
    w = starting_w
    # Hoisted out of the loop: the original rebuilt np.array(points) on
    # every iteration even though points never changes.
    data = np.array(points)
    for i in range(num_iterations):
        b, w = gradient(b, w, data, learning_rate)
        print('i = {}, b = {}, w = {}, error = {}'.format(i, b, w, compute_error(b, w, points)))
    return [b, w]
def main():
    """Generate noisy linear data, fit a line by gradient descent, and plot it."""
    m = 100
    x = 3 * np.random.rand(m, 1)
    # Ground truth is y = 5 + 2x, perturbed by uniform noise in [0, 1).
    y = 5 + 2 * x + np.random.rand(m, 1)
    points = np.hstack((x, y))
    print(points)
    learning_rate = 0.001
    initial_b = 2
    initial_w = 1
    num_iterations = 10000
    print("Running...")
    b, w = lr(points, initial_b, initial_w, learning_rate, num_iterations)
    print("b = {0}, w = {1}, error = {2}".format(b, w, compute_error(b, w, points)))
    # Blue dots: raw data. Red dots: the fitted line's predictions.
    plt.plot(points[:, 0], points[:, 1], "b.")
    predictions = points[:, 0] * w + b
    plt.plot(points[:, 0], predictions, 'r.')
    plt.show()
# Run the demo only when executed as a script, not on import.
if __name__ == '__main__':
    main()
# [web-page residue, not program code] Hosting-platform content-moderation
# notice ("content may be unsuitable for display ... you may appeal");
# it has no relation to the program above.