Reputation: 67
I built and trained a model with Keras, and saved it with the tensorflowjs converter (the tfjs.converters.save_keras_model() function).
Later, when trying to load it in tensorflowjs, I get the following error:
Error: The first layer in a Sequential model must get an 'inputShape' or 'batchInputShape' argument.
But after checking in the JSON file containing the model structure, there IS an input shape specified.
Any idea why tfjs cannot load it?
Could it be caused by the different variable names (batch_input_shape
in my JSON file and batchInputShape
in the error message)?
Here is how I build and train the model:
model.add(LSTM(128, dropout=0.2, input_shape=(time_steps, input_dim) ))
model.add(Dense(output_dim, activation='sigmoid'))
model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])
model.fit_generator(generator=train_generator,
steps_per_epoch=steps_per_epoch,
epochs=epochs,
validation_data=valid_generator,
validation_steps=valid_steps
)
Here is the JSON file. I don't know where the third null
variable comes from, but if I change it I get an error saying the number of dimensions is wrong.
"format": "layers-model",
"generatedBy": "keras v2.3.1",
"convertedBy": "TensorFlow.js Converter v1.4.0",
"modelTopology": {
"keras_version": "2.3.1",
"backend": "tensorflow",
"model_config": {
"class_name": "Sequential",
"config": {
"name": "sequential_1",
"layers": [
{
"class_name": "LSTM",
"config": {
"name": "lstm_1",
"trainable": true,
"batch_input_shape": [null, 10, 100],
"dtype": "float32",
"return_sequences": false,
"return_state": false,
"go_backwards": false,
"stateful": false,
"unroll": false,
"units": 128,
"activation": "tanh",
"recurrent_activation": "sigmoid",
"use_bias": true,
"kernel_initializer": {
"class_name": "VarianceScaling",
"config": {
"scale": 1.0,
"mode": "fan_avg",
"distribution": "uniform",
"seed": null
}
},
"recurrent_initializer": {"class_name": "Orthogonal", "config": {"gain": 1.0, "seed": null}},
"bias_initializer": {"class_name": "Zeros", "config": {}},
"unit_forget_bias": true,
"kernel_regularizer": null,
"recurrent_regularizer": null,
"bias_regularizer": null,
"activity_regularizer": null,
"kernel_constraint": null,
"recurrent_constraint": null,
"bias_constraint": null,
"dropout": 0.2,
"recurrent_dropout": 0.0,
"implementation": 2
}
},
{"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 29, "activation": "sigmoid", "use_bias": true, "kernel_initializer":
{"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}
]
}
},
"training_config": {
"optimizer_config": {
"class_name": "Adam",
"config": {
"learning_rate": 0.0010000000474974513,
"beta_1": 0.8999999761581421,
"beta_2": 0.9990000128746033,
"decay": 0.0,
"epsilon": 1e-07,
"amsgrad": false
}
},
"loss": "mse",
"metrics": ["accuracy"],
"weighted_metrics": null,
"sample_weight_mode": null,
"loss_weights": null
}
},
"weightsManifest": [{
"paths": ["group1-shard1of1.bin"],
"weights": [
{"name": "dense_1/kernel", "shape": [128, 29], "dtype": "float32"},
{"name": "dense_1/bias", "shape": [29], "dtype": "float32"},
{"name": "lstm_1/kernel", "shape": [100, 512], "dtype": "float32"},
{"name": "lstm_1/recurrent_kernel", "shape": [128, 512], "dtype": "float32"},
{"name": "lstm_1/bias", "shape": [512], "dtype": "float32"}
]
}]
}
Upvotes: 1
Views: 1139
Reputation: 119
I had the same problem: I had only uploaded the model.json file and not the .bin file that is also created by the tensorflowjs_converter. Make sure you upload the model.json file as well as the .bin file(s) in the same folder — the model.json file references the .bin file(s) to load the weights.
Upvotes: 1