# Univariate linear-regression demo: fit y ~ theta0 + theta1*x by batch
# gradient descent, then animate the fitted line with matplotlib.
from collections import defaultdict

import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation

# Training data: feature values and their observed targets.
x = [150, 200, 250, 300, 350, 400, 600]
y = [6450, 7450, 8450, 9450, 11450, 15450, 18450]

# Per-iteration endpoints of the fitted line, keyed by iteration index.
# A defaultdict(list) supports the same data[i] / data[i].append(...) usage
# as the original, without pre-allocating 1,000,000 empty lists up front.
data = defaultdict(list)
def LossFunctionX(theta0, theta1, xs=None, ys=None):
    """Gradient of the squared-error cost with respect to the intercept.

    Returns sum(h(x_i) - y_i) where h(x) = theta0 + theta1 * x.

    Args:
        theta0: current intercept.
        theta1: current slope.
        xs, ys: optional feature/target sequences; default to the
            module-level training data (keeps the original call
            signature backward-compatible).
    """
    if xs is None:
        xs = x
    if ys is None:
        ys = y
    # Idiomatic pairwise iteration instead of indexing by range(len(...)).
    return sum(theta0 + theta1 * xi - yi for xi, yi in zip(xs, ys))
def LossFunctionY(theta0, theta1, xs=None, ys=None):
    """Gradient of the squared-error cost with respect to the slope.

    Returns sum((h(x_i) - y_i) * x_i) where h(x) = theta0 + theta1 * x.

    Args:
        theta0: current intercept.
        theta1: current slope.
        xs, ys: optional feature/target sequences; default to the
            module-level training data (keeps the original call
            signature backward-compatible).
    """
    if xs is None:
        xs = x
    if ys is None:
        ys = y
    # Idiomatic pairwise iteration instead of indexing by range(len(...)).
    return sum((theta0 + theta1 * xi - yi) * xi for xi, yi in zip(xs, ys))
def update(i):
    """Animation callback: redraw the fitted line for timestep *i*.

    Mutates the module-level ``line`` and ``ax``: the line's x-data is
    pinned to the first/last feature values and its y-data is taken from
    the recorded endpoints in ``data[i]``.
    """
    caption = 'timestep {0}'.format(i)
    endpoints_x = [x[0], x[len(x) - 1]]
    line.set_xdata(endpoints_x)
    line.set_ydata(data[i])
    ax.set_xlabel(caption)
    return line, ax
def GradientDescent(theta0, theta1, iters, alphaX, alphaY):
    """Fit y ~ theta0 + theta1*x by batch gradient descent.

    Side effects: plots each intermediate fit in blue via the module-level
    ``plt`` and records the fitted line's endpoints into ``data[i]`` for
    the later animation.

    Args:
        theta0, theta1: initial intercept and slope.
        iters: number of descent iterations.
        alphaX, alphaY: learning rates for the intercept and slope.

    Returns:
        The final (theta0, theta1) pair.
    """
    m = len(x)                      # sample count — loop-invariant
    x_ends = [x[0], x[len(x) - 1]]  # fitted-line x endpoints — loop-invariant
    for i in range(iters):
        # BUG FIX: evaluate both gradients at the *same* (theta0, theta1).
        # The original updated theta0 first and then used the new value
        # inside theta1's gradient (non-simultaneous update).
        grad0 = LossFunctionX(theta0, theta1)
        grad1 = LossFunctionY(theta0, theta1)
        theta0 = theta0 - alphaX * grad0 / m
        theta1 = theta1 - alphaY * grad1 / m
        plt.plot(x, y, 'bo')
        y_ends = [theta0 + theta1 * x_ends[0], theta0 + theta1 * x_ends[1]]
        plt.plot(x_ends, y_ends, 'b')
        data[i].append(y_ends[0])
        data[i].append(y_ends[1])
    return theta0, theta1
if __name__ == '__main__':
    # Fit the model: 4000 iterations, with separate learning rates for
    # the intercept (alphaX) and the slope (alphaY).
    a, b = GradientDescent(0, 0, int(4E3), 1E-1, 1E-5)

    # Static figure: the data points plus the final fit drawn in red.
    plt.plot(x, y, 'bo')
    x_ends = [x[0], x[len(x) - 1]]
    y_ends = [a + b * x_ends[0], a + b * x_ends[1]]
    plt.plot(x_ends, y_ends, 'r')
    plt.show()

    # Animated figure: replay the recorded fits per timestep, save a GIF.
    fig, ax = plt.subplots()
    fig.set_tight_layout(True)
    ax.scatter(x, y)
    line, = ax.plot([x[0], 0], [x[len(x) - 1], 0], 'b')
    anim = FuncAnimation(fig, update, frames=10, interval=1000)
    anim.save('line.gif', dpi=100, writer='pillow')

    print("theta0: {0}".format(a))
    print("theta1: {0}".format(b))