Reputation: 293
The model has an input shape of 2048 transfer values taken from the Inception model.
What I wanted to achieve is try to redo this code https://github.com/Hvass-Labs/TensorFlow-Tutorials/blob/master/08_Transfer_Learning.ipynb into Keras API.
Everything went smooth till I tried to save it.
When I try to save it, it raises TypeError: ('Not JSON Serializable:', Dimension(2048))
I am able to save other models without a problem.
I don't understand why this one doesn't work.
I tried to save it on Windows 10 with python_ver = 3.6, tensorflow_ver = 1.6rc0 and on Ubuntu 16.04 with python_ver = 3.6, tensorflow_ver = 1.3.
I created the model with the code below.
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.layers import InputLayer
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.optimizers import Adam

# Declare variables for the model.
# IMPORTANT: transfer_len must be a plain Python int.  If it is taken from a
# TensorFlow shape (e.g. transfer_values_train.shape[1]) it will be a
# tensorflow.python.framework.tensor_shape.Dimension, which is not JSON
# serializable and makes model.save() raise
# TypeError: ('Not JSON Serializable:', Dimension(2048)).
# Casting with int() fixes the save.
transfer_len = int(2048)  # length of the Inception transfer-value vector
num_classes = 3           # number of output classes

# Model creation: a small classifier on top of pre-computed transfer values.
model = Sequential()
# Input layer of shape 2048.
model.add(InputLayer(input_shape=(transfer_len,)))
# Fully connected hidden layer of 1024 units.
model.add(Dense(1024, activation='relu'))
# Output layer: per-class probabilities.
model.add(Dense(num_classes, activation='softmax'))

optimizer = Adam(lr=1e-3)
model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# transfer_values_train / labels_train are assumed to be defined earlier
# (pre-computed Inception transfer values and one-hot labels) — TODO confirm.
model.fit(x=transfer_values_train,
          y=labels_train,
          epochs=20, batch_size=100, verbose=0)

output_path = "model.keras"
model.save(output_path)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-22-6a252d3d7102> in <module>()
----> 1 model.save(output_path)
~\Anaconda3\envs\gpu\lib\site-packages\tensorflow\python\keras\_impl\keras\engine\topology.py in save(self, filepath, overwrite, include_optimizer)
1044 """
1045 from tensorflow.python.keras._impl.keras.models import save_model # pylint: disable=g-import-not-at-top
-> 1046 save_model(self, filepath, overwrite, include_optimizer)
1047
1048 def save_weights(self, filepath, overwrite=True):
~\Anaconda3\envs\gpu\lib\site-packages\tensorflow\python\keras\_impl\keras\models.py in save_model(model, filepath, overwrite, include_optimizer)
131 'config': model.get_config()
132 },
--> 133 default=get_json_type).encode('utf8')
134
135 model_weights_group = f.create_group('model_weights')
~\Anaconda3\envs\gpu\lib\json\__init__.py in dumps(obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
236 check_circular=check_circular, allow_nan=allow_nan, indent=indent,
237 separators=separators, default=default, sort_keys=sort_keys,
--> 238 **kw).encode(obj)
239
240
~\Anaconda3\envs\gpu\lib\json\encoder.py in encode(self, o)
197 # exceptions aren't as detailed. The list call should be roughly
198 # equivalent to the PySequence_Fast that ''.join() would do.
--> 199 chunks = self.iterencode(o, _one_shot=True)
200 if not isinstance(chunks, (list, tuple)):
201 chunks = list(chunks)
~\Anaconda3\envs\gpu\lib\json\encoder.py in iterencode(self, o, _one_shot)
255 self.key_separator, self.item_separator, self.sort_keys,
256 self.skipkeys, _one_shot)
--> 257 return _iterencode(o, 0)
258
259 def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
~\Anaconda3\envs\gpu\lib\site-packages\tensorflow\python\keras\_impl\keras\models.py in get_json_type(obj)
113 return obj.__name__
114
--> 115 raise TypeError('Not JSON Serializable:', obj)
116
117 from tensorflow.python.keras._impl.keras import __version__ as keras_version # pylint: disable=g-import-not-at-top
TypeError: ('Not JSON Serializable:', Dimension(2048))
Upvotes: 2
Views: 2706
Reputation: 293
Okay, so the transfer_len variable was of type 'tensorflow.python.framework.tensor_shape.Dimension'.
After changing it to int, the model saves normally.
Upvotes: 2