<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU in i.MX Processors</title>
    <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2169449#M240723</link>
    <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Thanks for the reply.&lt;/P&gt;&lt;P&gt;I will try to create new ticket on this meanwhile after your vacation please support here.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks and Regards,&lt;/P&gt;&lt;P&gt;GV Subba Reddy&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
    <pubDate>Mon, 15 Sep 2025 07:09:08 GMT</pubDate>
    <dc:creator>subbareddyai</dc:creator>
    <dc:date>2025-09-15T07:09:08Z</dc:date>
    <item>
      <title>Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU</title>
      <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167437#M240664</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;I would like to convert the&amp;nbsp;Tensorflow Savedmodel to tflite model which supports imx8mp NPU.&lt;/P&gt;&lt;P&gt;I followed the below steps with no success&lt;/P&gt;&lt;P&gt;python models/research/object_detection/exporter_main_v2.py \&lt;BR /&gt;--input_type image_tensor \&lt;BR /&gt;--pipeline_config_path training_dir/pipeline.config \&lt;BR /&gt;--trained_checkpoint_dir training_dir/checkpoint \&lt;BR /&gt;--output_directory exported-model&lt;/P&gt;&lt;P&gt;and I make sure its fixed shape&amp;nbsp;model {&lt;BR /&gt;ssd {&lt;BR /&gt;image_resizer {&lt;BR /&gt;fixed_shape_resizer {&lt;BR /&gt;height: 320&lt;BR /&gt;width: 320&lt;BR /&gt;}&lt;BR /&gt;}&lt;BR /&gt;}&lt;BR /&gt;}&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;and also&amp;nbsp;Ensure TFLite-Compatible Ops&amp;nbsp;&lt;/P&gt;&lt;P&gt;ssd {&lt;BR /&gt;feature_extractor {&lt;BR /&gt;type: "ssd_mobilenet_v2_fpn_keras"&lt;BR /&gt;use_depthwise: true&lt;BR /&gt;}&lt;BR /&gt;box_predictor {&lt;BR /&gt;convolutional_box_predictor {&lt;BR /&gt;use_depthwise: true&lt;BR /&gt;}&lt;BR /&gt;}&lt;BR /&gt;}&lt;/P&gt;&lt;P&gt;tflite conversion script&amp;nbsp;&lt;/P&gt;&lt;P&gt;import tensorflow as tf&lt;BR /&gt;import pathlib&lt;/P&gt;&lt;P&gt;saved_model_dir = "exported-model/saved_model"&lt;/P&gt;&lt;P&gt;converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)&lt;BR /&gt;converter.optimizations = [tf.lite.Optimize.DEFAULT]&lt;/P&gt;&lt;P&gt;# Provide representative dataset for INT8 calibration&lt;BR /&gt;def representative_data_gen():&lt;BR /&gt;data_dir = pathlib.Path("dataset/val")&lt;BR /&gt;for img_path in data_dir.glob("*.jpg"):&lt;BR /&gt;img = tf.keras.preprocessing.image.load_img(img_path, target_size=(320, 320))&lt;BR /&gt;img = tf.keras.preprocessing.image.img_to_array(img)&lt;BR /&gt;img = img[tf.newaxis, ...] 
/ 255.0&lt;BR /&gt;yield [img.astype("float32")]&lt;/P&gt;&lt;P&gt;converter.representative_dataset = representative_data_gen&lt;BR /&gt;converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]&lt;BR /&gt;converter.inference_input_type = tf.uint8&lt;BR /&gt;converter.inference_output_type = tf.uint8&lt;/P&gt;&lt;P&gt;tflite_model = converter.convert()&lt;/P&gt;&lt;P&gt;with open("model_int8.tflite", "wb") as f:&lt;BR /&gt;f.write(tflite_model)&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;command to run the inference with&amp;nbsp;model_int8.tflite&amp;nbsp;&lt;/P&gt;&lt;P&gt;$ USE_GPU_INFERENCE=0 \&lt;BR /&gt;python3 label_image.py -m model_int8.tflite \&lt;BR /&gt;-e /usr/lib/liblitert_vx_delegate.so&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;please help me if these steps correct.&amp;nbsp;&amp;nbsp;&lt;/P&gt;&lt;P&gt;all the above steps from the chatgpt.&lt;/P&gt;</description>
      <pubDate>Thu, 11 Sep 2025 05:19:35 GMT</pubDate>
      <guid>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167437#M240664</guid>
      <dc:creator>subbareddyai</dc:creator>
      <dc:date>2025-09-11T05:19:35Z</dc:date>
    </item>
    <item>
      <title>Re: Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU</title>
      <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167516#M240668</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.nxp.com/t5/user/viewprofilepage/user-id/254707"&gt;@subbareddyai&lt;/a&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;You can use our eIQ tool &lt;SPAN&gt;to convert&amp;nbsp;the model.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;&lt;A href="https://www.nxp.com/design/design-center/software/eiq-ai-development-environment/eiq-toolkit-for-end-to-end-model-development-and-deployment:EIQ-TOOLKIT" target="_blank" rel="noopener"&gt;https://www.nxp.com/design/design-center/software/eiq-ai-development-environment/eiq-toolkit-for-end-to-end-model-development-and-deployment:EIQ-TOOLKIT&lt;/A&gt;&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;B.R&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 11 Sep 2025 06:54:02 GMT</pubDate>
      <guid>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167516#M240668</guid>
      <dc:creator>pengyong_zhang</dc:creator>
      <dc:date>2025-09-11T06:54:02Z</dc:date>
    </item>
    <item>
      <title>Re: Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU</title>
      <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167799#M240673</link>
      <description>&lt;P&gt;I tried converting using eiq tool and tflite. conversion with quantization and int8&amp;nbsp; is successful (Refer attached image for quantization settings that I have used for conversion) but when I use the tflite model with inference it's saying the below error&lt;/P&gt;&lt;P&gt;Failed to load delegate or model with delegate. Trying without delegate. Error: Didn't find op for builtin opcode 'EXP' version '2'. An older version of this builtin might be supported. Are you using an old TFLite binary with a newer model?&lt;BR /&gt;Registration failed.&lt;/P&gt;&lt;P&gt;Traceback (most recent call last):&lt;BR /&gt;File "inference_quant_int8.py", line 50, in &amp;lt;module&amp;gt;&lt;BR /&gt;interpreter = Interpreter(model_path=MODEL_PATH, experimental_delegates=[delegate])&lt;BR /&gt;File "/home/root/miniforge3/envs/tflite/lib/python3.8/site-packages/tflite_runtime/interpreter.py", line 455, in __init__&lt;BR /&gt;_interpreter_wrapper.CreateWrapperFromFile(&lt;BR /&gt;ValueError: Didn't find op for builtin opcode 'EXP' version '2'. An older version of this builtin might be supported. Are you using an old TFLite binary with a newer model?&lt;BR /&gt;Registration failed.&amp;nbsp;&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Thu, 11 Sep 2025 10:45:39 GMT</pubDate>
      <guid>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2167799#M240673</guid>
      <dc:creator>subbareddyai</dc:creator>
      <dc:date>2025-09-11T10:45:39Z</dc:date>
    </item>
    <item>
      <title>Re: Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU</title>
      <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2169341#M240717</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://community.nxp.com/t5/user/viewprofilepage/user-id/254707"&gt;@subbareddyai&lt;/a&gt;&amp;nbsp;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;I will be on vacation for a week and may not reply during this period. If you are in a hurry, you can create a new ticket. My other colleagues will support you. Thanks for your understanding.&lt;/SPAN&gt;&lt;/P&gt;
&lt;P&gt;&lt;SPAN&gt;B.R&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 15 Sep 2025 04:14:38 GMT</pubDate>
      <guid>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2169341#M240717</guid>
      <dc:creator>pengyong_zhang</dc:creator>
      <dc:date>2025-09-15T04:14:38Z</dc:date>
    </item>
    <item>
      <title>Re: Tensorflow Savedmodel to tflite conversion which supports imx8mp NPU</title>
      <link>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2169449#M240723</link>
      <description>&lt;P&gt;Hi,&lt;/P&gt;&lt;P&gt;Thanks for the reply.&lt;/P&gt;&lt;P&gt;I will try to create new ticket on this meanwhile after your vacation please support here.&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;&lt;P&gt;Thanks and Regards,&lt;/P&gt;&lt;P&gt;GV Subba Reddy&lt;/P&gt;&lt;P&gt;&amp;nbsp;&lt;/P&gt;</description>
      <pubDate>Mon, 15 Sep 2025 07:09:08 GMT</pubDate>
      <guid>https://community.nxp.com/t5/i-MX-Processors/Tensorflow-Savedmodel-to-tflite-conversion-which-supports-imx8mp/m-p/2169449#M240723</guid>
      <dc:creator>subbareddyai</dc:creator>
      <dc:date>2025-09-15T07:09:08Z</dc:date>
    </item>
  </channel>
</rss>

