Luke WIlliams
Luke WIlliams

Reputation: 53

ValueError: No gradients provided for any variable (Tensorflow)

I'm brand new to machine learning and tensorflow, so I'm working on a personal project right now to try to learn a bit. I'm getting this error right now, and all of my research online hasn't helped me narrow the problem down exactly. My best guess is that there's something going on under the hood that I can't recognize that's making my loss function non-differentiable, which I know is a problem for RNNs. I appreciate any help!

$ python3 Stocker.py APIKEY MSFT

ValueError: No gradients provided for any variable: ['lstm/lstm_cell/kernel:0', 'lstm/lstm_cell/recurrent_kernel:0', 'lstm/lstm_cell/bias:0', 'dense/kernel:0', 'dense/bias:0']

Code:

import pandas as pd
import numpy as np
import argparse
import tensorflow as tf
from matplotlib import pyplot

import data_helpers as dh

class Stocker:
    """Small Keras LSTM regression model wrapper for stock time-series data."""

    def __init__(self, training, test, loss='mse', optimizer='adam',
                 training_targets=None, test_targets=None):
        """ Creating Stocker instance immediately creates model

            Model (WIP) is a single LSTM layer followed by a Dense(5)
            output. Defaults to Mean Squared Error loss function and
            ADAM optimizer function.

            Args:
                training: 3-D input tensor, assumed shape
                    (samples, timesteps, features) — TODO confirm against
                    the caller's reshape.
                test: 3-D validation input tensor, same layout.
                loss: Keras loss identifier.
                optimizer: Keras optimizer identifier.
                training_targets: supervised targets for `training`.
                    Keras cannot compute gradients without targets;
                    fitting with inputs only raises
                    "No gradients provided for any variable".
                test_targets: targets for `test` (validation labels).
        """
        self.training_data = training
        self.test_data = test
        self.training_targets = training_targets
        self.test_targets = test_targets

        self.model = tf.keras.Sequential()
        self.model.add(tf.keras.layers.LSTM(
            100, activation='tanh', recurrent_activation='sigmoid',
            input_shape=(training.shape[1], training.shape[2])))
        self.model.add(tf.keras.layers.Dense(5))
        self.model.compile(loss=loss, optimizer=optimizer)
        print(self.model.summary())

    def train(self):
        """ Fit the model, then plot train/validation loss per epoch.

            Raises:
                ValueError: if no training targets were supplied.
        """
        if self.training_targets is None:
            # Fail fast with a clear message instead of Keras's opaque
            # "No gradients provided for any variable" error.
            raise ValueError(
                "No training targets supplied; pass training_targets to "
                "Stocker so model.fit() receives labels.")

        validation = None
        if self.test_targets is not None:
            validation = (self.test_data, self.test_targets)

        # BUG FIX: the original called fit() with inputs only (no y),
        # which is what produced the "No gradients" ValueError.
        self.fit = self.model.fit(self.training_data, self.training_targets,
                                  epochs=50, batch_size=100,
                                  validation_data=validation,
                                  verbose=2, shuffle=False)

        # BUG FIX: model.fit returns a History object, which is not
        # subscriptable; the per-epoch losses live in its .history dict.
        pyplot.plot(self.fit.history['loss'], label='train')
        if validation is not None:
            # 'val_loss' only exists when validation data was provided.
            pyplot.plot(self.fit.history['val_loss'], label='test')
        pyplot.legend()
        pyplot.show()



if __name__ == '__main__':
    # Test/Demo of Stocker module.

    arg_parser = argparse.ArgumentParser(description="Model Training Script")
    arg_parser.add_argument('key', help='User API Key')
    arg_parser.add_argument('-outdir', metavar='out', default='/models/', help="Directory for stored model(s) (one for each symbol).")
    arg_parser.add_argument('symbols', nargs=argparse.REMAINDER, help="List of symbols to train (Place all at end of command)")
    parse = arg_parser.parse_args()

    data = {}

    for symbol in parse.symbols:
        # Pull full historical daily prices from alpha_vantage for this
        # symbol and keep them around keyed by symbol.
        hist = dh.daily(symbol, parse.key, compact=False)
        data[symbol] = hist
        print(hist)

        # --- Data preprocessing ---

        raw = hist.to_numpy()

        # 90/10 split: the first tenth of the rows becomes the test set,
        # the remainder the training set.
        cutoff = round(raw.shape[0] / 10)
        test, training = raw[:cutoff], raw[cutoff:]

        # Frame each split as a supervised problem, then insert a
        # singleton timestep axis: (samples, 1, features) for the LSTM.
        sup_test = dh.array_to_supervised(test, 3)
        test = sup_test.reshape((sup_test.shape[0], 1, sup_test.shape[1]))

        sup_train = dh.array_to_supervised(training, 3)
        training = sup_train.reshape((sup_train.shape[0], 1, sup_train.shape[1]))

        # Cast the numpy arrays to float32 tensors for TensorFlow.
        training_tensor = tf.convert_to_tensor(training, dtype=np.float32)
        test_tensor = tf.convert_to_tensor(test, dtype=np.float32)

        stocker = Stocker(training_tensor, test_tensor)
        stocker.train()

Upvotes: 3

Views: 1834

Answers (1)

user11530462
user11530462

Reputation:

You get this error when you pass only the training data and forget to pass the labels in model.fit(). I was able to recreate your error using the code below. You can download the dataset I am using in the program from here. I am using Tensorflow version 2.2.0.

Code to recreate the issue -

%tensorflow_version 2.x
# MLP for Pima Indians Dataset saved to single file
# Minimal script that deliberately reproduces the asker's
# "No gradients provided for any variable" error.
import numpy as np
from numpy import loadtxt
import tensorflow as tf
print(tf.__version__)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# load pima indians dataset
dataset = np.loadtxt("/content/pima-indians-diabetes.csv", delimiter=",")

# split into input (X) and output (Y) variables
X = dataset[:,0:8]
Y = dataset[:,8]

# define model
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Model Summary
#model.summary()

# Fit the model
# NOTE: Y (the labels) is intentionally omitted here — Keras then has
# no targets to compute a loss against, which triggers the
# "No gradients provided for any variable" ValueError shown below.
model.fit(X, epochs=150, batch_size=10, verbose=0)

Output -

2.2.0
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-4-7ddca8f2992e> in <module>()
     28 
     29 # Fit the model
---> 30 model.fit(X, epochs=150, batch_size=10, verbose=0)

10 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    966           except Exception as e:  # pylint:disable=broad-except
    967             if hasattr(e, "ag_error_metadata"):
--> 968               raise e.ag_error_metadata.to_exception(e)
    969             else:
    970               raise

ValueError: in user code:

    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:571 train_function  *
        outputs = self.distribute_strategy.run(
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:951 run  **
        return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2290 call_for_each_replica
        return self._call_for_each_replica(fn, args, kwargs)
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2649 _call_for_each_replica
        return fn(*args, **kwargs)
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:541 train_step  **
        self.trainable_variables)
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:1804 _minimize
        trainable_variables))
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:521 _aggregate_gradients
        filtered_grads_and_vars = _filter_grads(grads_and_vars)
    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/optimizer_v2/optimizer_v2.py:1219 _filter_grads
        ([v.name for _, v in grads_and_vars],))

    ValueError: No gradients provided for any variable: ['dense_5/kernel:0', 'dense_5/bias:0', 'dense_6/kernel:0', 'dense_6/bias:0', 'dense_7/kernel:0', 'dense_7/bias:0'].

Solution - Pass the training labels in model.fit() and your error will be fixed.

Modified,

model.fit(X , epochs=150, batch_size=10, verbose=0)

to

model.fit(X , Y, epochs=150, batch_size=10, verbose=0)

Code -

%tensorflow_version 2.x
# MLP for Pima Indians Dataset saved to single file
# Same script as above, with the fix applied: labels are now passed
# to model.fit(), so gradients can be computed.
import numpy as np
from numpy import loadtxt
import tensorflow as tf
print(tf.__version__)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

# load pima indians dataset
dataset = np.loadtxt("/content/pima-indians-diabetes.csv", delimiter=",")

# split into input (X) and output (Y) variables
X = dataset[:,0:8]
Y = dataset[:,8]

# define model
model = Sequential()
model.add(Dense(12, input_dim=8, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# compile model
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

# Model Summary
#model.summary()

# Fit the model
# Passing both X and Y gives Keras targets for the loss, resolving
# the "No gradients provided for any variable" error.
model.fit(X , Y, epochs=150, batch_size=10, verbose=0)

Output -

2.2.0
<tensorflow.python.keras.callbacks.History at 0x7f9208433eb8>

Hope this answers your question. Happy Learning.

Upvotes: 2

Related Questions