I am using Google Colab to train an object detection model on a dataset from Roboflow.com. When I try to train the model via
batch_size = 16
num_epochs = 10
train_dataset = create_dataset(TRAINING_TFRecord, batch_size)
model.fit(train_dataset, epochs=num_epochs)
I get the above-mentioned error, as follows:
Epoch 1/10
--------------------------------------------------------------------------- UnimplementedError Traceback (most recent call last) <ipython-input-60-99c88cc37d2a> in <cell line: 8>() 6 train_dataset = create_dataset(TRAINING_TFRecord, batch_size) 7 8 model.fit(train_dataset, epochs=num_epochs)
1 frames /usr/local/lib/python3.10/dist-packages/tensorflow/python/eager/execute.py in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name) 51 try: 52 ctx.ensure_initialized() 53 tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name, 54 inputs, attrs, num_outputs) 55 except core._NotOkStatusException as e:
UnimplementedError: Graph execution error:
Detected at node Cast defined at (most recent call last): File "/usr/lib/python3.10/runpy.py", line 196, in _run_module_as_main
File "/usr/lib/python3.10/runpy.py", line 86, in _run_code
File "/usr/local/lib/python3.10/dist-packages/colab_kernel_launcher.py", line 37, in <module>
File "/usr/local/lib/python3.10/dist-packages/traitlets/config/application.py", line 992, in launch_instance
File "/usr/local/lib/python3.10/dist-packages/ipykernel/kernelapp.py", line 619, in start
File "/usr/local/lib/python3.10/dist-packages/tornado/platform/asyncio.py", line 195, in start
File "/usr/lib/python3.10/asyncio/base_events.py", line 603, in run_forever
File "/usr/lib/python3.10/asyncio/base_events.py", line 1909, in _run_once
File "/usr/lib/python3.10/asyncio/events.py", line 80, in _run
File "/usr/local/lib/python3.10/dist-packages/tornado/ioloop.py", line 685, in <lambda>
File "/usr/local/lib/python3.10/dist-packages/tornado/ioloop.py", line 738, in _run_callback
File "/usr/local/lib/python3.10/dist-packages/tornado/gen.py", line 825, in inner
File "/usr/local/lib/python3.10/dist-packages/tornado/gen.py", line 786, in run
File "/usr/local/lib/python3.10/dist-packages/ipykernel/kernelbase.py", line 361, in process_one
File "/usr/local/lib/python3.10/dist-packages/tornado/gen.py", line 234, in wrapper
File "/usr/local/lib/python3.10/dist-packages/ipykernel/kernelbase.py", line 261, in dispatch_shell
File "/usr/local/lib/python3.10/dist-packages/tornado/gen.py", line 234, in wrapper
File "/usr/local/lib/python3.10/dist-packages/ipykernel/kernelbase.py", line 539, in execute_request
File "/usr/local/lib/python3.10/dist-packages/tornado/gen.py", line 234, in wrapper
File "/usr/local/lib/python3.10/dist-packages/ipykernel/ipkernel.py", line 302, in do_execute
File "/usr/local/lib/python3.10/dist-packages/ipykernel/zmqshell.py", line 539, in run_cell
File "/usr/local/lib/python3.10/dist-packages/IPython/core/interactiveshell.py", line 2975, in run_cell
File "/usr/local/lib/python3.10/dist-packages/IPython/core/interactiveshell.py", line 3030, in _run_cell
File "/usr/local/lib/python3.10/dist-packages/IPython/core/async_helpers.py", line 78, in _pseudo_sync_runner
File "/usr/local/lib/python3.10/dist-packages/IPython/core/interactiveshell.py", line 3257, in run_cell_async
File "/usr/local/lib/python3.10/dist-packages/IPython/core/interactiveshell.py", line 3473, in run_ast_nodes
File "/usr/local/lib/python3.10/dist-packages/IPython/core/interactiveshell.py", line 3553, in run_code
File "<ipython-input-52-99c88cc37d2a>", line 8, in <cell line: 8>
File "/usr/local/lib/python3.10/dist-packages/keras/src/utils/traceback_utils.py", line 65, in error_handler
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1807, in fit
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1401, in train_function
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1384, in step_function
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1373, in run_step
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1151, in train_step
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 1209, in compute_loss
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/compile_utils.py", line 275, in __call__
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/compile_utils.py", line 860, in match_dtype_and_rank
2 root error(s) found. (0) UNIMPLEMENTED: Cast string to float is not supported [[{{node Cast}}]] (1) CANCELLED: Function was cancelled before it was started 0 successful operations. 0 derived errors ignored. [Op:__inference_train_function_5403]
Here are the other relevant functions I use beforehand:
def parse_tfrecord_fn(example):
    feature_description = {
        'image/encoded': tf.io.FixedLenFeature([], tf.string),
        'image/filename': tf.io.FixedLenFeature([], tf.string),
        'image/format': tf.io.FixedLenFeature([], tf.string),
        'image/height': tf.io.FixedLenFeature([], tf.int64),
        'image/width': tf.io.FixedLenFeature([], tf.int64),
        'image/object/bbox/xmax': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/xmin': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/ymax': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/ymin': tf.io.FixedLenFeature([], tf.float32),
        'image/object/class/label': tf.io.FixedLenFeature([], tf.int64),
        'image/object/class/text': tf.io.FixedLenFeature([], tf.string)
    }
    example = tf.io.parse_single_example(example, feature_description)
    image = tf.io.decode_jpeg(example['image/encoded'])
    label = example['image/object/class/label']
    filename = example['image/filename']
    format = example['image/object/class/label']
    xmin = example['image/object/bbox/xmin']
    ymin = example['image/object/bbox/ymin']
    xmax = example['image/object/bbox/xmax']
    ymax = example['image/object/bbox/xmax']
    text = example['image/object/class/text']
    return image, label, text
def create_dataset(tfrecord_file, batch_size):
    dataset = tf.data.TFRecordDataset(tfrecord_file)
    dataset = dataset.map(parse_tfrecord_fn)
    dataset = dataset.batch(batch_size)
    # further transformations possible here
    return dataset
from tensorflow.keras import layers, models

def create_cnn_model(input_shape, num_classes):
    model = models.Sequential()
    model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=input_shape))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.MaxPooling2D((2, 2)))
    model.add(layers.Conv2D(64, (3, 3), activation='relu'))
    model.add(layers.Flatten())
    model.add(layers.Dense(128, activation='relu'))
    model.add(layers.Dense(1, activation='sigmoid'))  # for two-class classification
    #model.add(layers.Dense(num_classes, activation='softmax'))  # for multi-class classification
    model.compile(optimizer='adam',
                  loss='sparse_categorical_crossentropy',  # if the labels are integers
                  metrics=['accuracy'])
    return model
I have adapted my parse function multiple times, but couldn't fix the error.
----- ERROR IS NOW FIXED -----
As mentioned in a comment, the label must be an integer, but mine was a string. The parse section now looks as follows:
label_mapping = {"black stain": 1, "decayed tooth": 2}

def convert_label(label_string):
    return label_mapping[label_string.numpy().decode('utf-8')]
def parse_tfrecord_fn(example):
    feature_description = {
        'image/encoded': tf.io.FixedLenFeature([], tf.string),
        'image/filename': tf.io.FixedLenFeature([], tf.string),
        'image/format': tf.io.FixedLenFeature([], tf.string),
        'image/height': tf.io.FixedLenFeature([], tf.int64),
        'image/width': tf.io.FixedLenFeature([], tf.int64),
        'image/object/bbox/xmax': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/xmin': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/ymax': tf.io.FixedLenFeature([], tf.float32),
        'image/object/bbox/ymin': tf.io.FixedLenFeature([], tf.float32),
        'image/object/class/label': tf.io.FixedLenFeature([], tf.int64),
        'image/object/class/text': tf.io.FixedLenFeature([], tf.string)  # the string still has to be converted to an integer
    }
    example = tf.io.parse_single_example(example, feature_description)
    image = tf.io.decode_jpeg(example['image/encoded'])
    # convert the class-name string to an integer label; tf.py_function is needed
    # because convert_label calls .numpy() on the tensor
    label = tf.py_function(convert_label, [example['image/object/class/text']], tf.int64)
    filename = example['image/filename']
    format = example['image/format']
    xmin = example['image/object/bbox/xmin']
    ymin = example['image/object/bbox/ymin']
    xmax = example['image/object/bbox/xmax']
    ymax = example['image/object/bbox/ymax']
    text = example['image/object/class/text']
    return image, label
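For completeness, the same string-to-integer mapping can also be done without tf.py_function. The following is only a minimal sketch, assuming the same TFRecord field names as above; parse_tfrecord_fn_lookup and label_table are names I made up for illustration:

import tensorflow as tf

# Graph-friendly alternative: a tf.lookup.StaticHashTable maps the class-name
# string to an integer inside the graph, so tf.data can trace the parse function.
keys = tf.constant(["black stain", "decayed tooth"])
values = tf.constant([1, 2], dtype=tf.int64)
label_table = tf.lookup.StaticHashTable(
    tf.lookup.KeyValueTensorInitializer(keys, values), default_value=-1
)

def parse_tfrecord_fn_lookup(example):
    # only the fields needed for (image, label) pairs are parsed here
    feature_description = {
        'image/encoded': tf.io.FixedLenFeature([], tf.string),
        'image/object/class/text': tf.io.FixedLenFeature([], tf.string),
    }
    example = tf.io.parse_single_example(example, feature_description)
    image = tf.io.decode_jpeg(example['image/encoded'])
    label = label_table.lookup(example['image/object/class/text'])
    return image, label

With either version the dataset now yields (image, label) pairs with an integer label, so model.fit(train_dataset, epochs=num_epochs) no longer attempts to cast a string to float.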