Reputation: 11
I'm quite new to using TensorFlow, and I imagine someone will quickly tell me I'm doing something stupid, so here goes.
I'm working with the MSTAR dataset and trying to get it read in. The files have a very strange format, but suffice it to say that, with eager execution on, the following code reads and displays an image from the dataset.
import tensorflow as tf
import matplotlib.pyplot as plt
tf.enable_eager_execution()
img1Path='HB15000.018'
img2Path='HB15001.018'
def pathToImgTF(path):
    with tf.io.gfile.GFile(path,'rb') as filePath:
        step1=filePath.readlines()
        step2=[x.strip(b'\n') for x in step1]
        # Pull the fields we need out of the ASCII Phoenix header
        for x in step2:
            if b'PhoenixHeaderLength' in x:
                line=x.strip().split(b'=')
                PhoenixHeaderLength=int(line[1])
            elif b'native_header_length' in x:
                line=x.strip().split(b'=')
                native_header_length=int(line[1])
            elif b'NumberOfColumns' in x:
                line=x.strip().split(b'=')
                NumberOfColumns=int(line[1])
            elif b'NumberOfRows' in x:
                line=x.strip().split(b'=')
                NumberOfRows=int(line[1])
        # Skip past both headers, then decode the raw big-endian float32 data
        filePath.seek(PhoenixHeaderLength+native_header_length)
        step3=tf.decode_raw(filePath.read(),out_type=tf.float32,little_endian=False)
    depth_major=tf.reshape(step3,[2,NumberOfRows,NumberOfColumns])
    image=tf.transpose(depth_major,[1,2,0])[:,:,0] #Cut off phase for now
    return image
img=pathToImgTF(img1Path)
plt.imshow(img,cmap='gray')
I would like to use tf.data.Dataset.from_tensor_slices, but it appears that isn't an option, because the following code:
ds=tf.data.Dataset.from_tensor_slices([img1Path,img2Path])
ds=ds.map(pathToImgTF)
gives the error "TypeError: Expected binary or unicode string, got tf.Tensor 'args_0:0' shape=() dtype=string".
The traceback looks to me like it's breaking specifically on 'filePath.readlines()'; any help would be greatly appreciated.
Full error output:
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
ipython-input-6-e12909fb73cd in module
      1 ds=tf.data.Dataset.from_tensor_slices([img1Path,img2Path])
----> 2 ds=ds.map(pathToImgTF)

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in map(self, map_func, num_parallel_calls)
   1770     if num_parallel_calls is None:
   1771       return DatasetV1Adapter(
-> 1772           MapDataset(self, map_func, preserve_cardinality=False))
   1773     else:
   1774       return DatasetV1Adapter(

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in init(self, input_dataset, map_func, use_inter_op_parallelism, preserve_cardinality, use_legacy_function)
   3188         self._transformation_name(),
   3189         dataset=input_dataset,
-> 3190         use_legacy_function=use_legacy_function)
   3191     variant_tensor = gen_dataset_ops.map_dataset(
   3192         input_dataset._variant_tensor,  # pylint: disable=protected-access

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in init(self, func, transformation_name, dataset, input_classes, input_shapes, input_types, input_structure, add_to_graph, use_legacy_function, defun_kwargs)
   2553     resource_tracker = tracking.ResourceTracker()
   2554     with tracking.resource_tracker_scope(resource_tracker):
-> 2555       self._function = wrapper_fn._get_concrete_function_internal()
   2556     if add_to_graph:
   2557       self._function.add_to_graph(ops.get_default_graph())

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal(self, *args, **kwargs)
   1353     """Bypasses error checking when getting a graph function."""
   1354     graph_function = self._get_concrete_function_internal_garbage_collected(
-> 1355         *args, **kwargs)
   1356     # We're returning this concrete function to someone, and they may keep a
   1357     # reference to the FuncGraph without keeping a reference to the

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   1347     if self.input_signature:
   1348       args, kwargs = None, None
-> 1349     graph_function, _, _ = self._maybe_define_function(args, kwargs)
   1350     return graph_function
   1351

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
   1650       graph_function = self._function_cache.primary.get(cache_key, None)
   1651       if graph_function is None:
-> 1652         graph_function = self._create_graph_function(args, kwargs)
   1653         self._function_cache.primary[cache_key] = graph_function
   1654       return graph_function, args, kwargs

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   1543           arg_names=arg_names,
   1544           override_flat_arg_shapes=override_flat_arg_shapes,
-> 1545           capture_by_value=self._capture_by_value),
   1546       self._function_attributes)
   1547

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    713                                           converted_func)
    714
--> 715     func_outputs = python_func(*func_args, **func_kwargs)
    716
    717     # invariant: func_outputs contains only Tensors, CompositeTensors,

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in wrapper_fn(*args)
   2547         attributes=defun_kwargs)
   2548     def wrapper_fn(*args):  # pylint: disable=missing-docstring
-> 2549       ret = _wrapper_helper(*args)
   2550       ret = self._output_structure._to_tensor_list(ret)
   2551       return [ops.convert_to_tensor(t) for t in ret]

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\data\ops\dataset_ops.py in _wrapper_helper(*args)
   2487       nested_args = (nested_args,)
   2488
-> 2489     ret = func(*nested_args)
   2490     # If func returns a list of tensors, nest.flatten() and
   2491     # ops.convert_to_tensor() would conspire to attempt to stack

in pathToImgTF(path)
      9 def pathToImgTF(path):
     10     with tf.io.gfile.GFile(path,'rb') as filePath:
---> 11         step1=filePath.readlines()
     12         step2=[x.strip(b'\n') for x in step1]
     13         for x in step2:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\lib\io\file_io.py in readlines(self)
    181   def readlines(self):
    182     """Returns all lines from the file in a list."""
--> 183     self._preread_check()
    184     lines = []
    185     while True:

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\lib\io\file_io.py in _preread_check(self)
     82           "File isn't open for reading")
     83       self._read_buf = pywrap_tensorflow.CreateBufferedInputStream(
---> 84           compat.as_bytes(self.__name), 1024 * 512)
     85
     86   def _prewrite_check(self):

~\AppData\Local\Continuum\anaconda3\lib\site-packages\tensorflow\python\util\compat.py in as_bytes(bytes_or_text, encoding)
     63   else:
     64     raise TypeError('Expected binary or unicode string, got %r' %
---> 65                     (bytes_or_text,))
     66
     67

TypeError: Expected binary or unicode string, got tf.Tensor 'args_0:0' shape=() dtype=string
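For what it's worth, I can reproduce what looks like the same failure without the dataset at all by handing the function a tensor instead of a plain string, which I assume is roughly what ds.map is doing once it traces pathToImgTF (just my guess at the cause, not something I've confirmed in the TF source):
# Passing a string tensor rather than a Python str/bytes trips up GFile in
# the same way, because compat.as_bytes only accepts str or bytes.
pathToImgTF(tf.constant(img1Path))  # raises a similar TypeError: Expected binary or unicode string, got <tf.Tensor ...>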
Upvotes: 1
Views: 1086
Reputation: 2507
I am not sure what kind of data img1Path
and img2Path
hold. In any case, they must be tensors with a predefined data type, since these two carry the actual data.
If that is the case, then change the square brackets to parentheses as follows:
ds=tf.data.Dataset.from_tensor_slices((img1Path,img2Path))
If this is still throwing the same error, please share exactly where it is being thrown.
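If changing the brackets alone does not resolve it, another pattern I would also try (assuming you are on TF 1.13 or newer, where tf.py_function is available; load_image is just a name I made up) is to wrap pathToImgTF so that it runs eagerly inside map and receives a value it can turn into plain bytes:
def load_image(path):
    # tf.py_function executes the wrapped function eagerly, so 'p' is an
    # EagerTensor and p.numpy() is the plain bytes path that GFile accepts.
    return tf.py_function(lambda p: pathToImgTF(p.numpy()), [path], tf.float32)

ds=tf.data.Dataset.from_tensor_slices([img1Path,img2Path])
ds=ds.map(load_image)
This keeps all of your existing parsing code unchanged; only the entry point that map calls differs.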
Upvotes: 0