Can't convert saved keras model to TFLite

I implemented the AdaIN model in Keras. Training and inference work without problems.

However, when I tried to convert the model to TFLite, I ran into problems.

First, I saved the model as model.keras.

Second, there are no problems with deserialization:
model = keras.models.load_model('model.keras') or model = tf.keras.models.load_model('model.keras')
(which one I use makes no difference to the result or the error)
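
For context, here is a minimal sketch of what the save/load step looks like. The AdaIN layer and the toy network below are simplified placeholders for my actual model, not the real architecture:

import tensorflow as tf
import keras

# Simplified stand-in for the custom AdaIN layer: normalize the content
# features by their instance statistics, then rescale/shift them with the
# style statistics.
@keras.saving.register_keras_serializable()
class AdaIN(keras.layers.Layer):
    def call(self, inputs):
        content, style = inputs
        eps = 1e-5
        c_mean, c_var = tf.nn.moments(content, axes=[1, 2], keepdims=True)
        s_mean, s_var = tf.nn.moments(style, axes=[1, 2], keepdims=True)
        normalized = (content - c_mean) / tf.sqrt(c_var + eps)
        return normalized * tf.sqrt(s_var + eps) + s_mean

# Toy encoder/decoder just to make the snippet self-contained.
content_in = keras.Input(shape=(64, 64, 3))
style_in = keras.Input(shape=(64, 64, 3))
stylized = AdaIN()([content_in, style_in])
output = keras.layers.Conv2D(3, 3, padding='same')(stylized)
model = keras.Model([content_in, style_in], output)

model.save('model.keras')
model = keras.models.load_model('model.keras')  # loads without errors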


Now, about the problem itself. Following the TensorFlow documentation, I am trying to convert the model to TFLite like this:

converter = tf.lite.TFLiteConverter.from_keras_model(model)
converted_model = converter.convert()

and I get the following error:

---------------------------------------------------------------------------
ConverterError                            Traceback (most recent call last)
Cell In[6], line 2
      1 converter = tf.lite.TFLiteConverter.from_keras_model(model)
----> 2 converted = converter.convert()

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/lite.py:1139, in _export_metrics.<locals>.wrapper(self, *args, **kwargs)
   1136 @functools.wraps(convert_func)
   1137 def wrapper(self, *args, **kwargs):
   1138   # pylint: disable=protected-access
-> 1139   return self._convert_and_export_metrics(convert_func, *args, **kwargs)

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/lite.py:1093, in TFLiteConverterBase._convert_and_export_metrics(self, convert_func, *args, **kwargs)
   1091 self._save_conversion_params_metric()
   1092 start_time = time.process_time()
-> 1093 result = convert_func(self, *args, **kwargs)
   1094 elapsed_time_ms = (time.process_time() - start_time) * 1000
   1095 if result:

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/lite.py:1601, in TFLiteKerasModelConverterV2.convert(self)
   1588 @_export_metrics
   1589 def convert(self):
   1590   """Converts a keras model based on instance variables.
   1591 
   1592   Returns:
   (...)
   1599       Invalid quantization parameters.
   1600   """
-> 1601   saved_model_convert_result = self._convert_as_saved_model()
   1602   if saved_model_convert_result:
   1603     return saved_model_convert_result

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/lite.py:1582, in TFLiteKerasModelConverterV2._convert_as_saved_model(self)
   1578   graph_def, input_tensors, output_tensors = (
   1579       self._convert_keras_to_saved_model(temp_dir)
   1580   )
   1581   if self.saved_model_dir:
-> 1582     return super(TFLiteKerasModelConverterV2, self).convert(
   1583         graph_def, input_tensors, output_tensors
   1584     )
   1585 finally:
   1586   shutil.rmtree(temp_dir, True)

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/lite.py:1371, in TFLiteConverterBaseV2.convert(self, graph_def, input_tensors, output_tensors)
   1364   logging.info(
   1365       "Using new converter: If you encounter a problem "
   1366       "please file a bug. You can opt-out "
   1367       "by setting experimental_new_converter=False"
   1368   )
   1370 # Converts model.
-> 1371 result = _convert_graphdef(
   1372     input_data=graph_def,
   1373     input_tensors=input_tensors,
   1374     output_tensors=output_tensors,
   1375     **converter_kwargs,
   1376 )
   1378 return self._optimize_tflite_model(
   1379     result, self._quant_mode, quant_io=self.experimental_new_quantizer
   1380 )

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/convert_phase.py:212, in convert_phase.<locals>.actual_decorator.<locals>.wrapper(*args, **kwargs)
    210   else:
    211     report_error_message(str(converter_error))
--> 212   raise converter_error from None  # Re-throws the exception.
    213 except Exception as error:
    214   report_error_message(str(error))

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/convert_phase.py:205, in convert_phase.<locals>.actual_decorator.<locals>.wrapper(*args, **kwargs)
    202 @functools.wraps(func)
    203 def wrapper(*args, **kwargs):
    204   try:
--> 205     return func(*args, **kwargs)
    206   except ConverterError as converter_error:
    207     if converter_error.errors:

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/convert.py:984, in convert_graphdef(input_data, input_tensors, output_tensors, **kwargs)
    981   else:
    982     model_flags.output_arrays.append(util.get_tensor_name(output_tensor))
--> 984 data = convert(
    985     model_flags,
    986     conversion_flags,
    987     input_data.SerializeToString(),
    988     debug_info_str=debug_info.SerializeToString() if debug_info else None,
    989     enable_mlir_converter=enable_mlir_converter,
    990 )
    991 return data

File ~/anaconda3/lib/python3.11/site-packages/tensorflow/lite/python/convert.py:366, in convert(model_flags, conversion_flags, input_data_str, debug_info_str, enable_mlir_converter)
    358         conversion_flags.guarantee_all_funcs_one_use = True
    359         return convert(
    360             model_flags,
    361             conversion_flags,
   (...)
    364             enable_mlir_converter,
    365         )
--> 366     raise converter_error
    368 return _run_deprecated_conversion_binary(
    369     model_flags.SerializeToString(),
    370     conversion_flags.SerializeToString(),
    371     input_data_str,
    372     debug_info_str,
    373 )

ConverterError: Could not translate MLIR to FlatBuffer.

What is the problem and how can I fix it?

tensorflow==2.15.0
keras==3.0.5
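
For reference, the alternative conversion path I am considering next, based on the TFLite guide for models with unsupported ops: export the Keras 3 model to a SavedModel, convert from that, and allow TF select ops as a fallback. I have not verified that this avoids the MLIR error:

import tensorflow as tf

# Export an inference-only SavedModel from the Keras 3 model.
model.export('saved_model_dir')

converter = tf.lite.TFLiteConverter.from_saved_model('saved_model_dir')
# Allow falling back to regular TF ops for anything the TFLite builtins
# cannot express (e.g. the instance-normalization math inside AdaIN).
converter.target_spec.supported_ops = [
    tf.lite.OpsSet.TFLITE_BUILTINS,
    tf.lite.OpsSet.SELECT_TF_OPS,
]
tflite_model = converter.convert()

with open('model.tflite', 'wb') as f:
    f.write(tflite_model)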
