NPU usage for AI models


Mohan51
Contributor II

Hi, I am trying to utilize the NPU for my currency model. For that I converted my TensorFlow model to a quantized TFLite model (using eIQ), but inference is still running on the CPU cores. I tried the NXP example and it ran successfully; I am attaching a screenshot for your reference.
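
For reference, the Ethos-U delegate generally only accelerates operators that are fully integer (int8) quantized; if the eIQ conversion left float operators in the graph, those fall back to the CPU. As a point of comparison, here is a minimal sketch of full-integer post-training quantization with plain TensorFlow (the SavedModel path and the representative_images generator are placeholders, not your actual files):

import numpy as np
import tensorflow as tf

# Placeholder: yield real, preprocessed samples from your training data
def representative_images():
    for _ in range(100):
        yield [np.random.rand(1, 299, 299, 3).astype(np.float32)]

converter = tf.lite.TFLiteConverter.from_saved_model("Xception_saved")  # placeholder path
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.representative_dataset = representative_images
# Force every operator to int8 so the NPU delegate can claim the whole graph
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
converter.inference_input_type = tf.int8
converter.inference_output_type = tf.int8

with open("Xception_saved_converted.tflite", "wb") as f:
    f.write(converter.convert())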

 

While running my inference code, the logs show that an external delegate is created for the NXP example, but the 'external delegate is created' message does not appear for my custom model.

 

The last line shows that the TensorFlow Lite XNNPACK delegate is created for the CPU. Why is it being created for the CPU?
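
The XNNPACK line typically means that at least part of the graph is being executed by the CPU delegate rather than the NPU. A quick way to narrow this down (a sketch, reusing the delegate and model paths from the code below) is to load the Ethos-U delegate explicitly and inspect the model's input type; a float32 input usually means the model is not fully quantized:

import tflite_runtime.interpreter as tflite

# Check that the delegate library itself loads
try:
    delegate = tflite.load_delegate('/usr/lib/libethosu_delegate.so')
    print("Ethos-U delegate loaded")
except ValueError as err:
    print("Failed to load Ethos-U delegate:", err)
    delegate = None

interpreter = tflite.Interpreter(
    model_path="/home/root/currency/Xception_saved_converted.tflite",
    experimental_delegates=[delegate] if delegate else [])
interpreter.allocate_tensors()

# A fully quantized model reports int8/uint8 here, not float32
for detail in interpreter.get_input_details():
    print(detail['name'], detail['dtype'], detail['quantization'])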

 

This is my inference code:

from fastapi import FastAPI, File, UploadFile, HTTPException
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
import tflite_runtime.interpreter as tflite
import cv2
import numpy as np
import os


USE_HARDWARE_INFERENCE = True

# Allow disabling the NPU delegate via an environment variable
if os.environ.get('USE_HARDWARE_INFERENCE') == '0':
    USE_HARDWARE_INFERENCE = False

if USE_HARDWARE_INFERENCE:
    delegates = [tflite.load_delegate('/usr/lib/libethosu_delegate.so')]
else:
    delegates = []

app = FastAPI()

# Load class names and TensorFlow Lite interpreter for currency classification
currency_model_path = "/home/root/currency/Xception_saved_converted.tflite"  # Update with the actual path to your classification model
currency_class_names_path = "labels.txt"  # Update with the actual path to your class names file
with open(currency_class_names_path, "r") as file:
    currency_class_names = [line.strip() for line in file.readlines()]

currency_interpreter = tflite.Interpreter(model_path=currency_model_path, experimental_delegates=delegates)
currency_interpreter.allocate_tensors()

# Serve static files
app.mount("/static", StaticFiles(directory="static"), name="static")

@app.get("/", response_class=HTMLResponse)
async def get_index():
    with open("static/index.html", "r") as file:
        html_content = file.read()
    return HTMLResponse(content=html_content)

def classify_currency(image):
    input_details = currency_interpreter.get_input_details()
    output_details = currency_interpreter.get_output_details()

    # Resize and convert the image to match the model's input shape
    input_shape = input_details[0]['shape']
    img = cv2.resize(image, (input_shape[1], input_shape[2]))
    img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    input_data = np.expand_dims(img_rgb, axis=0).astype(np.float32)

    currency_interpreter.set_tensor(input_details[0]['index'], input_data)
    currency_interpreter.invoke()

    output_data = currency_interpreter.get_tensor(output_details[0]['index'])
    class_index = np.argmax(output_data)
    class_name = currency_class_names[class_index]
    return class_name

@app.post("/capture-and-detect/")
async def capture_and_detect(image_file: UploadFile = File(...)):
    contents = await image_file.read()

    nparr = np.frombuffer(contents, np.uint8)
    img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)

    if img is None:
        raise HTTPException(status_code=400, detail="Invalid image file")

    # Classify the image
    classification_result = classify_currency(img)

    result = {
        "classification": classification_result
    }

    return JSONResponse(content=result)

if __name__ == "__main__":
    import uvicorn

    # Ensure the static directory exists
    if not os.path.exists("static"):
        os.makedirs("static")

    uvicorn.run(app, host="0.0.0.0", port=81)
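
One detail worth noting in classify_currency above: the input is cast to np.float32, but a model that is fully int8-quantized for the NPU exposes an int8 input tensor and expects values scaled with the tensor's quantization parameters. A sketch of how that preprocessing could look (the /255.0 normalization is an assumption about the training pipeline; adjust it to match yours):

input_details = currency_interpreter.get_input_details()
img_float = np.expand_dims(img_rgb, axis=0).astype(np.float32) / 255.0  # assumed normalization

if input_details[0]['dtype'] == np.int8:
    # Quantize the float input with the scale/zero point the model reports
    scale, zero_point = input_details[0]['quantization']
    input_data = (img_float / scale + zero_point).astype(np.int8)
else:
    input_data = img_float

currency_interpreter.set_tensor(input_details[0]['index'], input_data)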

 

 


Bio_TICFSL
NXP TechSupport

Hello,

Which board are you using? Which BSP? The example runs normally on the NPU, but you are using FastAPI, which does not include support for the NPU; that is the reason for this behavior.
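
To isolate this, you can run a single inference outside FastAPI and check whether the external delegate message appears; a minimal sketch, reusing the model and delegate paths from your post:

import numpy as np
import tflite_runtime.interpreter as tflite

delegates = [tflite.load_delegate('/usr/lib/libethosu_delegate.so')]
interpreter = tflite.Interpreter(
    model_path="/home/root/currency/Xception_saved_converted.tflite",
    experimental_delegates=delegates)
interpreter.allocate_tensors()

inp = interpreter.get_input_details()[0]
dummy = np.zeros(inp['shape'], dtype=inp['dtype'])  # dummy input, just to exercise the delegate
interpreter.set_tensor(inp['index'], dummy)
interpreter.invoke()
print("Standalone inference done; check the log for the external delegate message")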

Regards
