When using transfer learning in TensorFlow, I know that one can lock layers from further training by doing:
for layer in pre_trained_model.layers:
    layer.trainable = False
Is it possible to lock specific filters in the layer instead? As in: if the whole layer contains 64 filters, is it possible to freeze only some of them and leave the rest trainable?
Upvotes: 3
One possible solution is to implement a custom layer that splits the convolution into `filters` separate single-filter convolutions and sets each channel (a convolution with one output channel) to trainable or non-trainable. For example:
import tensorflow as tf
import numpy as np

class Conv2DExtended(tf.keras.layers.Layer):
    def __init__(self, filters, kernel_size, **kwargs):
        # Call the parent constructor first so that Keras attribute
        # tracking is set up before sub-layers are assigned.
        super().__init__()
        self.filters = filters
        # One single-filter convolution per output channel.
        self.conv_layers = [tf.keras.layers.Conv2D(1, kernel_size, **kwargs)
                            for _ in range(filters)]

    def build(self, input_shape):
        for l in self.conv_layers:
            l.build(input_shape)
        super().build(input_shape)

    def set_trainable(self, channels):
        """Marks the given channels as trainable."""
        for i in channels:
            self.conv_layers[i].trainable = True

    def set_non_trainable(self, channels):
        """Marks the given channels as non-trainable (frozen)."""
        for i in channels:
            self.conv_layers[i].trainable = False

    def call(self, inputs):
        # Apply each single-filter convolution and concatenate the
        # results along the channel axis.
        results = [l(inputs) for l in self.conv_layers]
        return tf.concat(results, -1)
And a usage example:
inputs = tf.keras.layers.Input((28, 28, 1))
conv = Conv2DExtended(filters=4, kernel_size=(3, 3))
conv.set_non_trainable([1, 2])  # only channels 0 and 3 are trainable
res = conv(inputs)
res = tf.keras.layers.Flatten()(res)
res = tf.keras.layers.Dense(1, activation=tf.nn.sigmoid)(res)

model = tf.keras.models.Model(inputs, res)
model.compile(optimizer=tf.keras.optimizers.SGD(),
              loss='binary_crossentropy',
              metrics=['accuracy'])
model.fit(np.random.normal(0, 1, (10, 28, 28, 1)),
          np.random.randint(0, 2, (10,)),
          batch_size=2,
          epochs=5)
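As a quick sanity check (a minimal sketch, not part of the original code), you can confirm which variables the optimizer will actually update by inspecting the layer's trainable_weights and non_trainable_weights after the layer has been built:

# Minimal sketch: with set_non_trainable([1, 2]), only channels 0 and 3
# should contribute trainable variables (one kernel and one bias each,
# since Conv2D uses a bias by default).
print(len(conv.trainable_weights))      # expected: 4 (kernel + bias for channels 0 and 3)
print(len(conv.non_trainable_weights))  # expected: 4 (kernel + bias for channels 1 and 2)

Note that this design trades speed for flexibility: running `filters` separate single-filter convolutions is slower than one fused Conv2D call, so it is best reserved for layers where per-filter freezing is actually needed.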
Upvotes: 3