Reputation: 535
I have a custom Keras model whose hyperparameters I want to optimize, with good tracking and visualization of what's going on. To that end, I want to pass the hparams to the custom model like this:
class Model_hparams(tf.keras.Model):
    def __init__(self, hparams):
        super(Model_hparams, self).__init__()
        self.hps = hparams

    def build(self, inputs_shape):
        self.conv1 = tf.keras.layers.Conv1D(filters=self.hps[HP_NUM_UNITS_1],
                                            kernel_size=self.hps[HP_LEN_CONV_1],
                                            activation='relu',
                                            input_shape=inputs_shape[1:])
        self.pool1 = tf.keras.layers.MaxPool1D(pool_size=2)
        self.bn1 = tf.keras.layers.BatchNormalization()
        self.dense1 = tf.keras.layers.Dense(1)
        # actually, there are even more layers here

    def call(self, x, training=True):
        x = self.conv1(x)
        x = self.pool1(x)
        x = self.bn1(x, training=training)
        x = self.dense1(x)
        return x
I followed the TensorFlow hparams guide:
from tensorboard.plugins.hparams import api as hp

HP_NUM_UNITS_1 = hp.HParam('num_units_1', hp.Discrete([16, 32]))
HP_LEN_CONV_1 = hp.HParam('len_conv_1', hp.Discrete([3]))

METRIC = 'mae'

with tf.summary.create_file_writer("../../model_output/hparams").as_default():
    hp.hparams_config(
        hparams=[HP_NUM_UNITS_1,
                 HP_LEN_CONV_1],
        metrics=[hp.Metric(METRIC, display_name='Test_MAE')],
    )
def run(run_dir, hparams):
    with tf.summary.create_file_writer(run_dir).as_default():
        hp.hparams(hparams)  # record the values used in this trial
        test_mae = train_model(hparams)
        # the scalar tag must match the hp.Metric registered above
        tf.summary.scalar(METRIC, test_mae, step=1)
My training function then instantiates the model and runs the training procedure, which looks like this (simplified):
def train_model(hparams):
    model = Model_hparams(hparams)
    for batch in dataset:
        # ...
        with tf.GradientTape() as tape:
            predictions = model(batch, training=True)
        # ...
The actual optimization starts here:
n = 0
for num_units_1 in HP_NUM_UNITS_1.domain.values:
    for len_conv_1 in HP_LEN_CONV_1.domain.values:
        hparams = {HP_NUM_UNITS_1: num_units_1,
                   HP_LEN_CONV_1: len_conv_1}
        run_name = "run-%d" % n
        run("../../model_output/hparams/" + run_name, hparams)
        n += 1
However, when I run this, an error occurs as soon as the model is instantiated:
<ipython-input-99-17dd66300f5b> in __init__(self, hparams)
72 def __init__(self, hparams):
73 super(Model_hparams, self).__init__()
---> 74 self.hps = hparams
75
76 def build(self, inputs_shape):
c:\users\123\anaconda3\envs\python_3_8_env1\lib\site-packages\tensorflow\python\keras\engine\training.py in __setattr__(self, name, value)
312 isinstance(v, (base_layer.Layer,
313 data_structures.TrackableDataStructure)) or
--> 314 base_layer_utils.has_weights(v) for v in nest.flatten(value)):
315 try:
316 self._base_model_initialized
c:\users\123\anaconda3\envs\python_3_8_env1\lib\site-packages\tensorflow\python\util\nest.py in flatten(structure, expand_composites)
339 return [None]
340 expand_composites = bool(expand_composites)
--> 341 return _pywrap_utils.Flatten(structure, expand_composites)
342
343
TypeError: '<' not supported between instances of 'HParam' and 'HParam'
I'm not sure why this happens, and I cannot get it to work. I cannot find anything about it in the docs.
Is there anything I'm missing?
Thanks for the support.
Upvotes: 1
Views: 1035
Reputation: 2621
The tf.keras.Model class overrides __setattr__ so that layers and variables assigned as attributes are tracked automatically. As part of that tracking, every assigned value is passed through nest.flatten(), and flattening a dict sorts its keys first. HParam objects do not support the < comparison, which is exactly the TypeError in your traceback. You can reproduce the failure in isolation, and then bypass it with the trick below.
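A minimal repro of the underlying failure, as a sketch: it uses tf.nest.flatten, the public wrapper around the same flatten call shown in your traceback, and HP_A / HP_B are throwaway names of my own.

import tensorflow as tf
from tensorboard.plugins.hparams import api as hp

HP_A = hp.HParam('a', hp.Discrete([1]))
HP_B = hp.HParam('b', hp.Discrete([2]))

# Flattening a dict sorts its keys first. HParam defines no ordering,
# so sorting two HParam keys raises:
# TypeError: '<' not supported between instances of 'HParam' and 'HParam'
tf.nest.flatten({HP_A: 1, HP_B: 2})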
The bypass is to write the attribute through object directly,
object.__setattr__(self, 'hps', hparams)
instead of
self.hps = hparams
class Model_hparams(tf.keras.Model):
    def __init__(self, hparams):
        super(Model_hparams, self).__init__()
        object.__setattr__(self, 'hps', hparams)
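As a side note, here is a sketch of an alternative (the string keys below come from the .name of your HParam definitions, not from anything your code already does): converting the dict to plain string keys in __init__ also avoids the error, because string keys sort without problems, so normal Keras attribute tracking keeps working.

class Model_hparams(tf.keras.Model):
    def __init__(self, hparams):
        super(Model_hparams, self).__init__()
        # String keys are sortable, so Keras's attribute tracking
        # can flatten this dict without raising.
        self.hps = {h.name: v for h, v in hparams.items()}

    def build(self, inputs_shape):
        # Look values up by the HParam's registered name instead.
        self.conv1 = tf.keras.layers.Conv1D(filters=self.hps['num_units_1'],
                                            kernel_size=self.hps['len_conv_1'],
                                            activation='relu')
        # ...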
Upvotes: 1