Reputation: 5412
I think it is easier to clarify what I need with an MWE (the question is in the comments).
import tensorflow as tf
import numpy as np

class MLP:
    def __init__(self, sizes, activations):
        self.input = last_out = tf.placeholder(dtype=tf.float32, shape=[None, sizes[0]])
        self.layers = []
        for l, size in enumerate(sizes[1:]):
            self.layers.append(last_out)
            last_out = tf.layers.dense(last_out, size, activation=activations[l],
                                       kernel_initializer=tf.glorot_uniform_initializer())
        self.layers.append(last_out)
def main():
    session = tf.Session()
    dim = 3
    nn_sizes = [dim, 15, 1]
    nn_activations = [tf.nn.tanh, tf.nn.tanh, tf.identity]
    mynet = MLP(nn_sizes, nn_activations)
    w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='mynet')
    x1 = tf.placeholder(dtype=tf.float32, shape=[None, dim], name='x1')
    x2 = tf.placeholder(dtype=tf.float32, shape=[None, dim], name='x2')
    x3 = tf.placeholder(dtype=tf.float32, shape=[None, 1], name='x3')
    myfun = tf.reduce_sum(tf.multiply(x3, new_tensor))
    # new_tensor has to be the difference myfun(x2) - myfun(x3).
    # However, the network is the same and its input variable has a different name.
    # I would like to have something like:
    #     substitute(myfun, input, x1)
    #     substitute(myfun, input, x2)
    # without duplicating the network.
    optimizer = tf.contrib.opt.ScipyOptimizerInterface(myfun, var_list=w)
    n = 1000
    x1_samples = np.asmatrix(np.random.rand(n, dim))
    x2_samples = np.asmatrix(np.random.rand(n, dim))
    x3_samples = np.asmatrix(np.random.rand(n, 1))
    print(session.run(myfun, {x1: x1_samples, x2: x2_samples, x3: x3_samples}))
    optimizer.minimize(session, {x1: x1_samples, x2: x2_samples, x3: x3_samples})
    print(session.run(myfun, {x1: x1_samples, x2: x2_samples, x3: x3_samples}))

if __name__ == '__main__':
    main()
Upvotes: 0
Views: 50
Reputation: 59701
Here's one approach (I assume there is a typo and what you want is x3 * (mynet(x2) - mynet(x1))?). The idea is to concatenate x1 and x2 along the batch dimension, run the stacked batch through the network once, and split the output back into the two evaluations, so the layers and their variables are only ever created once:
import tensorflow as tf
import numpy as np

class MLP:
    def __init__(self, x1, x2, sizes, activations):
        # Remember the batch sizes so the stacked output can be split again.
        x_sizes = [tf.shape(x1)[0], tf.shape(x2)[0]]
        # Stack both inputs along the batch dimension and build the network once.
        last_out = tf.concat([x1, x2], axis=0)
        self.layers = []
        for l, size in enumerate(sizes[1:]):
            self.layers.append(last_out)
            last_out = tf.layers.dense(last_out, size, activation=activations[l],
                                       kernel_initializer=tf.glorot_uniform_initializer())
        self.layers.append(last_out)
        # Recover the network's evaluation at x1 and at x2.
        self.x1_eval, self.x2_eval = tf.split(last_out, x_sizes, axis=0)
def main():
    session = tf.Session()
    dim = 3
    nn_sizes = [dim, 15, 1]
    nn_activations = [tf.nn.tanh, tf.nn.tanh, tf.identity]
    x1 = tf.placeholder(dtype=tf.float32, shape=[None, dim], name='x1')
    x2 = tf.placeholder(dtype=tf.float32, shape=[None, dim], name='x2')
    x3 = tf.placeholder(dtype=tf.float32, shape=[None, 1], name='x3')
    # Build the network inside a named scope so its trainable variables can be
    # collected by scope name afterwards (collecting before the network exists
    # would return an empty list).
    with tf.variable_scope('mynet'):
        mynet = MLP(x1, x2, nn_sizes, nn_activations)
    w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='mynet')
    myfun = tf.reduce_sum(tf.multiply(x3, mynet.x2_eval - mynet.x1_eval))
    optimizer = tf.contrib.opt.ScipyOptimizerInterface(myfun, var_list=w)
    n = 1000
    x1_samples = np.asmatrix(np.random.rand(n, dim))
    x2_samples = np.asmatrix(np.random.rand(n, dim))
    x3_samples = np.asmatrix(np.random.rand(n, 1))
    session.run(tf.global_variables_initializer())
    print(session.run(myfun, {x1: x1_samples, x2: x2_samples, x3: x3_samples}))
    optimizer.minimize(session, {x1: x1_samples, x2: x2_samples, x3: x3_samples})
    print(session.run(myfun, {x1: x1_samples, x2: x2_samples, x3: x3_samples}))

if __name__ == '__main__':
    main()
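If you prefer not to stack the inputs, another way to share weights in TF 1.x is tf.make_template, which wraps a builder function so that every call reuses the same variables. A minimal sketch (build_mlp is a hypothetical helper mirroring the MLP above, not code from your post):

import tensorflow as tf

# Hypothetical builder with the same architecture as the MLP class above.
def build_mlp(x, sizes, activations):
    out = x
    for l, size in enumerate(sizes[1:]):
        out = tf.layers.dense(out, size, activation=activations[l],
                              kernel_initializer=tf.glorot_uniform_initializer())
    return out

dim = 3
nn_sizes = [dim, 15, 1]
nn_activations = [tf.nn.tanh, tf.nn.tanh, tf.identity]
x1 = tf.placeholder(tf.float32, shape=[None, dim], name='x1')
x2 = tf.placeholder(tf.float32, shape=[None, dim], name='x2')
x3 = tf.placeholder(tf.float32, shape=[None, 1], name='x3')

# Every call to mlp shares one set of variables, created under the scope 'mynet'.
mlp = tf.make_template('mynet', build_mlp, sizes=nn_sizes, activations=nn_activations)
y1 = mlp(x1)  # first call creates the variables
y2 = mlp(x2)  # second call reuses them
myfun = tf.reduce_sum(tf.multiply(x3, y2 - y1))
w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='mynet')

This also makes the scope='mynet' lookup from your original code work, since make_template creates its variables under the given scope name.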
Upvotes: 1