# tf2
# One training epoch: iterate mini-batches from train_db, accumulate the
# batch losses, and apply plain gradient-descent updates to w1 and b1.
# Assumes train_db yields (features, integer-label) batches and that
# w1, b1, lr, epoch, loss_all and train_loss_result are defined earlier
# in the script — TODO confirm against the surrounding file.
num_batches = 0
for step, (x_train, y_train) in enumerate(train_db):
    with tf.GradientTape() as tape:  # record forward pass for autodiff
        y = tf.matmul(x_train, w1) + b1        # linear-layer logits
        y = tf.nn.softmax(y)                   # logits -> class probabilities
        y_ = tf.one_hot(y_train, depth=3)      # integer labels -> one-hot (3 classes)
        loss = tf.reduce_mean(tf.square(y_ - y))  # mean-squared-error loss
        loss_all += loss                       # accumulate for the epoch average
    grads = tape.gradient(loss, [w1, b1])
    w1.assign_sub(lr * grads[0])  # w1 -= lr * dL/dw1
    b1.assign_sub(lr * grads[1])  # b1 -= lr * dL/db1
    num_batches = step + 1
# Average over the actual number of batches rather than the hard-coded 4,
# so the reported loss stays correct for any dataset/batch size.
# (`or 1` guards against an empty train_db.) Identical to the original
# when there are exactly 4 batches.
print("Epoch{},loss:{}".format(epoch, loss_all / (num_batches or 1)))
train_loss_result.append(loss_all / (num_batches or 1))
loss_all = 0  # reset the accumulator for the next epoch
# Evaluate accuracy on the held-out set: run the current linear model
# over every test batch, count exact label matches, and record the
# epoch's accuracy in test_acc.
total_correct, total_number = 0, 0
for x_test, y_test in test_db:
    logits = tf.matmul(x_test, w1) + b1
    probs = tf.nn.softmax(logits)
    # Predicted class = argmax over the probability axis, cast so it
    # compares cleanly against the ground-truth label dtype.
    preds = tf.cast(tf.argmax(probs, axis=1), dtype=y_test.dtype)
    hits = tf.reduce_sum(tf.cast(tf.equal(preds, y_test), dtype=tf.int32))
    total_correct += int(hits)
    total_number += x_test.shape[0]  # batch size of this test batch
acc = total_correct / total_number
test_acc.append(acc)
print("Test_acc:", acc)