Rxma

Reputation: 312

How to slice a Tensor by an index list and compose a new Tensor

How do I use a TensorFlow loop to slice a Tensor and compose a new Tensor? What I mean is:

    text_embeding =tf.constant(
                       #index 0       index 1      index 2
                   [[[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]], 
                    [[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]],
                    [[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]]
                   ] 
               )

For each tensor in the batch I want to gather a new list of values according to the index pairs index_list = [[0,0],[1,1],[2,2],[0,1],[1,2],[0,2]].

I want to get this value:

[
    [
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 0,0
     [0.4,0.5,0.6 , 0.4,0.5,0.6],  # index 1,1
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 2,2
     [0.1,0.2,0.3 , 0.4,0.5,0.6],  # index 0,1
     [0.4,0.5,0.6 , 0.1,0.2,0.3],  # index 1,2
     [0.1,0.2,0.3 , 0.1,0.2,0.3]   # index 0,2
    ],
    [
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 0,0
     [0.4,0.5,0.6 , 0.4,0.5,0.6],  # index 1,1
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 2,2
     [0.1,0.2,0.3 , 0.4,0.5,0.6],  # index 0,1
     [0.4,0.5,0.6 , 0.1,0.2,0.3],  # index 1,2
     [0.1,0.2,0.3 , 0.1,0.2,0.3]   # index 0,2
    ],
    [
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 0,0
     [0.4,0.5,0.6 , 0.4,0.5,0.6],  # index 1,1
     [0.1,0.2,0.3 , 0.1,0.2,0.3],  # index 2,2
     [0.1,0.2,0.3 , 0.4,0.5,0.6],  # index 0,1
     [0.4,0.5,0.6 , 0.1,0.2,0.3],  # index 1,2
     [0.1,0.2,0.3 , 0.1,0.2,0.3]   # index 0,2
    ]
]

My code is below, but batch_size = output_layer_sequence.shape[0] is None before the session graph is ready, so that line raises an error!

# This is the line that errors: output_layer_sequence.shape[0] is None before the graph is run
vsp = tf.batch_gather(output_layer_sequence,
                      tf.tile([[j, j + i]], multiples=[output_layer_sequence.shape[0], 1]))  # batch * 2 * hidden_size

for i in range(2):
    for j in range(2):
        # workaround: hard-code the batch size to 16
        vsp = tf.batch_gather(output_layer_sequence,
                              tf.tile([[j, j + i]], multiples=[16, 1]))  # batch * 2 * hidden_size
        # vsp = tf.batch_gather(output_layer_sequence,
        #                       tf.tile([[j, j + i]], multiples=[output_layer_sequence.shape[0], 1]))
        vsp_start, vsp_end = tf.split(vsp, 2, 1)  # batch * 1 * hidden_size each
        vsp_start = tf.squeeze(vsp_start)         # batch * hidden_size
        vsp_end = tf.squeeze(vsp_end)             # batch * hidden_size
        vsp = tf.concat([vsp_start, vsp_end], axis=-1, name='concat')  # [batch, 2 * hidden_size]

        span_logits = tf.matmul(vsp, output_span_weight, transpose_b=True)  # [batch, class_labels]
        span_logits = tf.nn.bias_add(span_logits, output_span_bias)         # [batch, class_labels]
        span_logit_sum.append(span_logits)  # span_logit_sum is a list defined earlier
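What I actually want instead of the hard-coded 16 is the batch size at run time. As far as I understand, tf.shape() returns the dynamic shape as a tensor, so a sketch like the following (same variable names as my code above, batch on axis 0) should avoid the None problem, but I am not sure it is the right way:

batch_size = tf.shape(output_layer_sequence)[0]  # dynamic batch size, a scalar tensor known only at run time
vsp = tf.batch_gather(output_layer_sequence,
                      tf.tile([[j, j + i]], multiples=tf.stack([batch_size, 1])))  # batch * 2 * hidden_size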

Thanks!

Upvotes: 1

Views: 70

Answers (1)

Andrey

Reputation: 6377

Use tf.gather():

text_embeding =tf.constant(
                    #index 0       index 1      index 2
                [[[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]], 
                [[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]],
                [[0.1,0.2,0.3],[0.4,0.5,0.6],[0.1,0.2,0.3]]
                ] 
            )
index_list = tf.constant([[0,0],[1,1],[2,2],[0,1],[1,2],[0,2]])
output = tf.gather(text_embeding, index_list, axis=1)  # gather along the sequence axis -> [batch, 6, 2, hidden]
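With axis=1 the gather result has shape [batch, 6, 2, hidden]: for each pair in index_list you get the two selected embeddings. If you want them joined into a single vector per pair, i.e. the [batch, 6, 2*hidden] layout shown in the question, one minimal sketch (the name flat is only for illustration) is:

# concatenate the two gathered embeddings of every index pair -> [batch, 6, 2*hidden]
flat = tf.concat([output[:, :, 0, :], output[:, :, 1, :]], axis=-1)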

Upvotes: 2
