I put a group of fixed-length and variable-length features into one tf.train.SequenceExample:

    context_features:
        length           scalar                   tf.int64
        site_code_raw    scalar                   tf.string
        Date_Local_raw   scalar                   tf.string
        Time_Local_raw   scalar                   tf.string

    sequence_features:
        Orig_RefPts      [#batch, #RefPoints, 4]  tf.float32
        tgt_location     [#batch, 3]              tf.float32
        tgt_val          [#batch, 1]              tf.float32

The value of #RefPoints varies between sequence examples; I store it in the length feature of the context_features. All of the other features have fixed sizes.
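(For reference, each record is assembled roughly like the sketch below; the helper names are illustrative and this is a simplification, not my exact writer code.)

    import tensorflow as tf

    def _floats(values):
        return tf.train.Feature(float_list=tf.train.FloatList(value=values))

    def _bytes(value):
        return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value.encode()]))

    def make_seq_example(ref_pts, tgt_location, tgt_val,
                         site_code, date_local, time_local):
        # ref_pts: a [#RefPoints, 4] list/array; one FeatureList step per reference point
        context = tf.train.Features(feature={
            'length': tf.train.Feature(
                int64_list=tf.train.Int64List(value=[len(ref_pts)])),
            'site_code': _bytes(site_code),
            'Date_Local': _bytes(date_local),
            'Time_Local': _bytes(time_local),
        })
        feature_lists = tf.train.FeatureLists(feature_list={
            'input_features': tf.train.FeatureList(
                feature=[_floats(pt) for pt in ref_pts]),
            'tgt_location_features': tf.train.FeatureList(
                feature=[_floats(tgt_location)]),  # a single step of 3 floats
            'tgt_val_feature': tf.train.FeatureList(
                feature=[_floats(tgt_val)]),       # a single step of 1 float
        })
        return tf.train.SequenceExample(context=context, feature_lists=feature_lists)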
Here is the code I am using to read & parse the data:
    import tensorflow as tf

    def read_batch_DatasetAPI(
            filenames,
            batch_size=20,
            num_epochs=None,
            buffer_size=5000):
        dataset = tf.contrib.data.TFRecordDataset(filenames)
        dataset = dataset.map(_parse_SeqExample1)
        if buffer_size is not None:
            dataset = dataset.shuffle(buffer_size=buffer_size)
        dataset = dataset.repeat(num_epochs)
        dataset = dataset.batch(batch_size)
        iterator = dataset.make_initializable_iterator()
        next_element = iterator.get_next()
        # next_element is a tuple of the following tensors
        # (batching prepends the #batch dimension):
        #   length,          [#batch],                tf.int64
        #   site_code_raw,   [#batch],                tf.string
        #   Date_Local_raw,  [#batch],                tf.string
        #   Time_Local_raw,  [#batch],                tf.string
        #   Orig_RefPts,     [#batch, #RefPoints, 4], tf.float32
        #   tgt_location,    [#batch, 3],             tf.float32
        #   tgt_val,         [#batch, 1],             tf.float32
        return iterator, next_element
    def _parse_SeqExample1(in_SeqEx_proto):
        # Define how to parse the example
        context_features = {
            'length': tf.FixedLenFeature([], dtype=tf.int64),
            'site_code': tf.FixedLenFeature([], dtype=tf.string),
            'Date_Local': tf.FixedLenFeature([], dtype=tf.string),
            'Time_Local': tf.FixedLenFeature([], dtype=tf.string)
        }
        sequence_features = {
            'input_features': tf.VarLenFeature(dtype=tf.float32),
            'tgt_location_features': tf.FixedLenSequenceFeature([3], dtype=tf.float32),
            'tgt_val_feature': tf.FixedLenSequenceFeature([1], dtype=tf.float32)
        }
        context, sequence = tf.parse_single_sequence_example(
            in_SeqEx_proto,
            context_features=context_features,
            sequence_features=sequence_features)

        # distribute the fetched context and sequence features into tensors
        length = context['length']
        site_code_raw = context['site_code']
        Date_Local_raw = context['Date_Local']
        Time_Local_raw = context['Time_Local']

        # reshape the tensors according to the dimensions defined above;
        # 'input_features' comes back as a SparseTensor, so take .values first
        Orig_RefPts = sequence['input_features'].values
        Orig_RefPts = tf.reshape(Orig_RefPts, [-1, 4])
        tgt_location = sequence['tgt_location_features']
        tgt_location = tf.reshape(tgt_location, [-1])
        tgt_val = sequence['tgt_val_feature']
        tgt_val = tf.reshape(tgt_val, [-1])

        return length, site_code_raw, Date_Local_raw, Time_Local_raw, \
               Orig_RefPts, tgt_location, tgt_val
When I call read_batch_DatasetAPI with batch_size = 1 (see the code below), it processes all (around 200,000) Sequence Examples one by one without any problem. But if I change batch_size to any number greater than 1, it simply stops after fetching between 320 and 700 Sequence Examples, with no error message. I don't know how to solve this problem. Any help is appreciated!
    # the iterator to get the next_element for one sample (in sequence)
    iterator, next_element = read_batch_DatasetAPI(
        in_tf_FWN,      # the file name of the tfrecords containing ~200,000 Sequence Examples
        batch_size=1,   # works when it is 1, doesn't work if > 1
        num_epochs=1,
        buffer_size=None)

    # tf session initialization
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())

    # reset the iterator to the beginning
    sess.run(iterator.initializer)

    try:
        step = 0
        while True:
            # get the next batch of data
            length, site_code_raw, Date_Local_raw, Time_Local_raw, \
                Orig_RefPts, tgt_location, tgt_val = sess.run(next_element)
            step = step + 1
    except tf.errors.OutOfRangeError:
        # Task done (all SeqExs have been visited)
        print("closing ", in_tf_FWN)
    except ValueError as err:
        print("Error: {}".format(err.args))
    except Exception as err:
        print("Error: {}".format(err.args))