# NOTE(review): this definition is duplicated later in the file; at import
# time the later definition wins, so the two must stay in sync.
def get_grad(model, x, y, var_indices=(1, 3, 5, 7)):
    """Compute the loss and its gradients w.r.t. a subset of trainable variables.

    Args:
        model: Keras model being trained.
        x: batch of input features.
        y: batch of integer class labels.
        var_indices: positions in ``model.trainable_variables`` to
            differentiate against. Default ``(1, 3, 5, 7)`` selects every
            other variable — the bias vectors of this 4-Dense-layer MLP,
            since variables alternate kernel/bias per layer.

    Returns:
        Tuple ``(loss, grads)`` where ``grads`` aligns element-wise with the
        selected variables.
    """
    with tf.GradientTape() as tape:
        loss = compute_loss(model, x, y, training=True)
    # Selection does not need to be taped: only `loss` is differentiated.
    selected = [v for ix, v in enumerate(model.trainable_variables)
                if ix in var_indices]
    return loss, tape.gradient(loss, selected)
import tensorflow as tf
from tensorflow.keras.layers import Dense
from sklearn.datasets import load_iris
import numpy as np
# Load the iris dataset and build the input pipeline.
X, y = load_iris(return_X_y=True)
X = X.astype(np.float32)  # Keras computes in float32; cast once up front.
# The shuffle buffer must cover the whole dataset (150 rows) for a uniform
# shuffle; the original buffer of 25 only produced a partial, windowed shuffle.
train = tf.data.Dataset.from_tensor_slices((X, y)).shuffle(len(X)).batch(8)
# Small MLP classifier for the 3 iris classes: three ReLU hidden layers
# followed by a softmax output.
_hidden = [Dense(units, activation='relu') for units in (16, 32, 64)]
model = tf.keras.Sequential(_hidden + [Dense(3, activation='softmax')])
# The final layer already applies softmax, so the loss receives probabilities.
loss_object = tf.losses.SparseCategoricalCrossentropy(from_logits=False)
def compute_loss(model, x, y, training):
    """Run a forward pass and return the sparse cross-entropy loss for the batch."""
    predictions = model(x, training=training)
    return loss_object(y_true=y, y_pred=predictions)
def get_grad(model, x, y, var_indices=(1, 3, 5, 7)):
    """Compute the loss and its gradients w.r.t. a subset of trainable variables.

    Args:
        model: Keras model being trained.
        x: batch of input features.
        y: batch of integer class labels.
        var_indices: positions in ``model.trainable_variables`` to
            differentiate against. Default ``(1, 3, 5, 7)`` selects every
            other variable — the bias vectors of this 4-Dense-layer MLP,
            since variables alternate kernel/bias per layer.

    Returns:
        Tuple ``(loss, grads)`` where ``grads`` aligns element-wise with the
        selected variables.
    """
    with tf.GradientTape() as tape:
        loss = compute_loss(model, x, y, training=True)
    # Selection does not need to be taped: only `loss` is differentiated.
    selected = [v for ix, v in enumerate(model.trainable_variables)
                if ix in var_indices]
    return loss, tape.gradient(loss, selected)
# Optimizer and the per-epoch progress-report template.
optimizer = tf.optimizers.Adam()
verbose = "Epoch {:2d} Loss: {:.3f} Acc: {:.2%}"
# Build the model now (iris has 4 features) so trainable_variables is
# populated before the training loop selects from it.
model.build(input_shape=[None, 4])
# Snapshot the first layer's weights to compare against after training.
weights_before = model.layers[0].get_weights()
# Train for 25 epochs, updating only the variables at indices 1, 3, 5, 7
# (the bias vectors). The model is already built above, so the selected
# subset is loop-invariant — compute it once instead of once per batch.
to_update = [v for ix, v in enumerate(model.trainable_variables)
             if ix in (1, 3, 5, 7)]
for epoch in range(1, 25 + 1):
    train_loss = tf.metrics.Mean()
    train_acc = tf.metrics.SparseCategoricalAccuracy()
    for x, y in train:
        # get_grad returns gradients in the same (1, 3, 5, 7) order.
        loss_value, grads = get_grad(model, x, y)
        optimizer.apply_gradients(zip(grads, to_update))
        train_loss.update_state(loss_value)
        # NOTE(review): accuracy is taken from a second forward pass with
        # training=True; harmless for this plain-Dense model (no dropout or
        # batch-norm), but worth confirming if layers change.
        train_acc.update_state(y, model(x, training=True))
    print(verbose.format(epoch,
                         train_loss.result(),
                         train_acc.result()))
weights_after = model.layers[0].get_weights()
应用梯度时,可以手动选择要更新的变量,如下所示:
它返回变量1、3、5、7,它们是偏置。是的,它确实有效:
完整代码:
相关问题 更多 >
编程相关推荐