Char
Char

Reputation: 1735

Keras 2 Deconv2D layer

Trying to follow this implementation of a fully-convolutional densenet in Keras 2. I updated the function calls for some layers and updated the concatenate layer from

def transition_up(added, wd=0):
    x = concat(added)
    _, r, c, ch = x.get_shape().as_list()
    return Deconvolution2D(ch, 3, 3, (None,r*2,c*2,ch), init='he_uniform', 
                           border_mode='same', subsample=(2,2), W_regularizer=l2(wd))(x)

to

def transition_up(added, wd=0):
    x = concat(added)
    _, r, c, ch = x.get_shape().as_list()
    return Deconv2D(ch, (3, 3), input_shape=(r*2, c*2, ch), padding='same',
                    kernel_initializer='he_uniform', kernel_regularizer=l2(wd)) (x)

but an input doesn't double like it should after the down path. Here's the full code to create the model with Keras 2.

def conv(x, nf, sz, wd, p, stride=1):
    x = Conv2D(nf, (sz, sz), strides=(stride, stride), padding='same',
               kernel_initializer='he_uniform', kernel_regularizer=l2(wd)) (x)
    return dropout(x, p)

def conv_relu_bn(x, nf, sz=3, wd=0, p=0, stride=1):
    return conv(relu_bn(x), nf, sz, wd=wd, p=p, stride=stride)

def dense_block(n, x, growth_rate, p, wd):
    added = []
    for i in range(n):
        b = conv_relu_bn(x, growth_rate, p=p, wd=wd)
        print('x:', x, 'b:', b)
        x = concat([x, b])
        added.append(b)
    return x, added

def transition_dn(x, p, wd):
    return conv_relu_bn(x, x.get_shape().as_list()[-1], sz=1, p=p, wd=wd, stride=2)

def down_path(x, nb_layers, growth_rate, p, wd):
    skips = []
    for i, n in enumerate(nb_layers):
        x, added = dense_block(n, x, growth_rate, p, wd)
        skips.append(x)
        x = transition_dn(x, p=p, wd=wd)
    return skips, added

def transition_up(added, wd=0):
    x = concat(added)
    _, r, c, ch = x.get_shape().as_list()
    return Deconv2D(ch, (3, 3), input_shape=(r*2, c*2, ch), padding='same',
                    kernel_initializer='he_uniform', kernel_regularizer=l2(wd)) (x)

def up_path(added, skips, nb_layers, growth_rate, p, wd):
    for i, n in enumerate(nb_layers):

        x = transition_up(added, wd)
        x = concat([x, skips[i]])
        x, added = dense_block(n, x, growth_rate, p, wd)
    return x

def reverse(x):
    return list(reversed(x))

def create_tiramisu(nb_classes, img_input, nb_dense_block=6, growth_rate=16, nb_filter=48,
                    nb_layers_per_block=5, p=None, wd=0):
    if type(nb_layers_per_block) is list or type(nb_layers_per_block) is tuple:
        nb_layers = list(nb_layers_per_block)
    else:
        nb_layers = [nb_layers_per_block] * nb_dense_block

    x = conv(img_input, nb_filter, 3, wd, 0)
    skips, added = down_path(x, nb_layers, growth_rate, p, wd)
    x = up_path(added, reverse(skips[:-1]), reverse(nb_layers[:-1]), growth_rate, p, wd)
    x = conv(x, nb_classes, 1, wd, 0)
    _, r, c, f = x.get_shape().as_list()
    x = Reshape((-1, nb_classes)) (x)
    return Activation('softmax') (x)

input_shape = (224, 224, 3)
img_input = Input(shape=input_shape)
x = create_tiramisu(12, img_input, nb_layers_per_block=[4, 5, 7, 10, 12, 15], p=0.2, wd=1e-4)

This is the stacktrace when the it tries to create the model with Keras 2 syntax. One of the inputs to the concatenate layer doesn't get doubled from (None, 7, 7, 656) when it should be like (None, 14, 14, 656) at the beginning of the upsampling path. This doesn't happen with Keras 1 syntax, which I can also post.

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-133-fb96a61666a1> in <module>()
----> 1 x = create_tiramisu(12, img_input, nb_layers_per_block=[4, 5, 7, 10, 12, 15], p=0.2, wd=1e-4)

<ipython-input-131-f58faf58b799> in create_tiramisu(nb_classes, img_input, nb_dense_block, growth_rate, nb_filter, nb_layers_per_block, p, wd)
     62     print('p', p)
     63     print('wd', wd)
---> 64     x = up_path(added, reverse(skips[:-1]), reverse(nb_layers[:-1]), growth_rate, p, wd)
     65     x = conv(x, nb_classes, 1, wd, 0)
     66     _, r, c, f = x.get_shape().as_list()

<ipython-input-131-f58faf58b799> in up_path(added, skips, nb_layers, growth_rate, p, wd)
     38         print('up path', 'i:', i, 'n:', n)
     39         x = transition_up(added, wd)
---> 40         x = concat([x, skips[i]])
     41         x, added = dense_block(n, x, growth_rate, p, wd)
     42     return x

<ipython-input-130-c6d2942be960> in concat(xs)
      5 def bn(x): return BatchNormalization(axis=-1)(x)
      6 def relu_bn(x): return relu(bn(x))
----> 7 def concat(xs): return concatenate(xs, axis=-1)

/home/clu/anaconda3/lib/python3.6/site-packages/keras/layers/merge.py in concatenate(inputs, axis, **kwargs)
    389         A tensor, the concatenation of the inputs alongside axis `axis`.
    390     """
--> 391     return Concatenate(axis=axis, **kwargs)(inputs)
    392 
    393 

/home/clu/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in __call__(self, inputs, **kwargs)
    528                     self.build(input_shapes[0])
    529                 else:
--> 530                     self.build(input_shapes)
    531                 self.built = True
    532 

/home/clu/anaconda3/lib/python3.6/site-packages/keras/layers/merge.py in build(self, input_shape)
    158                              'inputs with matching shapes '
    159                              'except for the concat axis. '
--> 160                              'Got inputs shapes: %s' % (input_shape))
    161 
    162     def call(self, inputs):

ValueError: `Concatenate` layer requires inputs with matching shapes except for the concat axis. Got inputs shapes: [(None, 7, 7, 240), (None, 14, 14, 656)]

Upvotes: 2

Views: 2535

Answers (1)

marcopah
marcopah

Reputation: 866

Use a stride of 2 in the deconvolutional layer.

Upvotes: 2

Related Questions