When I run the following script I get the error message 'No gradients provided for any variable'. The 'grads' variable is a list of 'None' values. What is possibly going wrong in such a simple script?
import tensorflow as tf
import numpy as np
# TF 1.x: opt in to eager execution so ops run immediately; required for the
# tf.GradientTape-based training step used below.
tf.enable_eager_execution()
class Model(tf.keras.Model):
    """Minimal Keras model: a single Dense layer with 4 linear outputs."""

    def __init__(self):
        super(Model, self).__init__()
        # One fully-connected layer, identity activation (pure affine map).
        self.layer = tf.keras.layers.Dense(4, activation = "linear")

    def call(self, x):
        """Forward pass: project x through the dense layer."""
        return self.layer(x)
model = Model()
model._set_inputs(tf.zeros((1, 5)))
optimizer = tf.train.GradientDescentOptimizer(0.5)

# gibberish data
x_train = np.array([[0, 0, 0, 0, 1]], dtype=np.float32)
y_train = np.array([[0.1, 0.1, 0.4, 0.4]])

# BUG FIX (this is why grads was [None, None]): the forward pass must run
# *inside* the GradientTape context. In the original, y_pred was computed
# before the tape was opened, so the tape never recorded the ops linking
# `loss` to the model's trainable variables and tape.gradient returned None
# for every variable.
with tf.GradientTape() as tape:
    # Invoke the model via __call__ (not model.call) so Keras build/tracking
    # hooks run; the tape now watches the dense layer's kernel and bias.
    y_pred = model(x_train)
    loss = tf.losses.mean_squared_error(y_train, y_pred)

# Gradients of the loss w.r.t. kernel and bias, then one SGD step (lr=0.5).
grads = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))