tensorflow v2.0入门教程——03线性回归
个人博客
本教程在理解线性回归原理的基础上,使用 TensorFlow 2.0 实现并训练一个线性回归模型。
线性回归
import tensorflow as tf
import numpy as np
# Hyperparameters
learning_rate = 0.01 # gradient-descent step size
training_steps = 1000 # total number of training iterations
display_step = 50 # log loss/parameters every 50 steps
# Training data: 17 scalar (x, y) pairs for univariate linear regression
X = np.array([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167,
7.042,10.791,5.313,7.997,5.654,9.27,3.1])
Y = np.array([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221,
2.827,3.465,1.65,2.904,2.42,2.94,1.3])
n_samples = X.shape[0]  # number of training samples (17)
# Randomly initialize the trainable weight and bias (scalar tf.Variables)
W = tf.Variable(np.random.randn(), name="weight")
b = tf.Variable(np.random.randn(), name="bias")
# 线性回归函数
# The linear model itself.
def linear_regression(x):
    """Return the model prediction W * x + b for input x.

    Relies on the module-level trainable variables W and b.
    """
    return W * x + b
# 损失函数
# Loss function for training.
def mean_square(y_pred, y_true):
    """Half mean-squared error: sum((y_pred - y_true)^2) / (2 * n_samples).

    The factor of 2 in the denominator is the conventional scaling that
    simplifies the gradient of the squared term.
    """
    squared_error = tf.pow(y_pred - y_true, 2)
    return tf.reduce_sum(squared_error) / (2 * n_samples)
# Optimizer: plain stochastic gradient descent (SGD)
optimizer = tf.optimizers.SGD(learning_rate)
# 计算梯度,更新参数
# One full training step: forward pass, loss, gradients, parameter update.
def run_optimization():
    """Perform a single gradient-descent update of (W, b) on the full dataset."""
    # Record the forward pass so gradients can be derived from it afterwards.
    with tf.GradientTape() as tape:
        prediction = linear_regression(X)
        loss = mean_square(prediction, Y)
    # d(loss)/dW and d(loss)/db
    grad_w, grad_b = tape.gradient(loss, [W, b])
    # Apply param -= learning_rate * grad for each (gradient, variable) pair.
    optimizer.apply_gradients([(grad_w, W), (grad_b, b)])
# 开始训练
for step in range(1, training_steps+1):
run_optimization()
if step % display_step == 0:
pred = linear_regression(X)
loss = mean_square(pred, Y)
print("step: %i, loss: %f, W: %f, b: %f" % (step, loss, W.numpy(), b.numpy()))
输出结果
step: 50, loss: 0.122252, W: 0.372333, b: -0.056897
step: 100, loss: 0.117069, W: 0.365222, b: -0.006478
step: 150, loss: 0.112478, W: 0.358529, b: 0.040970
step: 200, loss: 0.108412, W: 0.352231, b: 0.085622
step: 250, loss: 0.104811, W: 0.346304, b: 0.127643
step: 300, loss: 0.101622, W: 0.340726, b: 0.167189
step: 350, loss: 0.098798, W: 0.335476, b: 0.204404
step: 400, loss: 0.096297, W: 0.330536, b: 0.239427
step: 450, loss: 0.094082, W: 0.325887, b: 0.272386
step: 500, loss: 0.092120, W: 0.321512, b: 0.303402
step: 550, loss: 0.090383, W: 0.317395, b: 0.332592
step: 600, loss: 0.088844, W: 0.313520, b: 0.360061
step: 650, loss: 0.087481, W: 0.309874, b: 0.385912
step: 700, loss: 0.086274, W: 0.306442, b: 0.410240
step: 750, loss: 0.085205, W: 0.303213, b: 0.433135
step: 800, loss: 0.084259, W: 0.300174, b: 0.454680
step: 850, loss: 0.083421, W: 0.297314, b: 0.474956
step: 900, loss: 0.082678, W: 0.294623, b: 0.494037
step: 950, loss: 0.082021, W: 0.292090, b: 0.511994
step: 1000, loss: 0.081438, W: 0.289706, b: 0.528893
可视化查看
import matplotlib.pyplot as plt

# Scatter the raw data and overlay the learned regression line.
fitted_line = np.array(W * X + b)
plt.plot(X, Y, 'ro', label='Original data')
plt.plot(X, fitted_line, label='Fitted line')
plt.legend()
plt.show()
个人博客
还没有评论,来说两句吧...