support for rknn.rknnpool.is_available

yoni13 2025-01-11 10:39:45 +08:00
parent a2722e16e7
commit c20d110257
2 changed files with 17 additions and 1 deletion

View File

@@ -11,6 +11,8 @@ import ann.ann
from app.sessions.ort import OrtSession
from app.sessions.rknn import RknnSession
import rknn.rknnpool
from ..config import clean_name, log, settings
from ..schemas import ModelFormat, ModelIdentity, ModelSession, ModelTask, ModelType
from ..sessions.ann import AnnSession
@@ -158,7 +160,7 @@ class InferenceModel(ABC):
    @property
    def _model_format_default(self) -> ModelFormat:
        if settings.rknn:
        if rknn.rknnpool.is_available and settings.rknn:
            return ModelFormat.RKNN
        elif ann.ann.is_available and settings.ann:
            return ModelFormat.ARMNN

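A quick way to confirm what the new flag reports on a given device, assuming the rknn package is importable from the service's working directory:

import rknn.rknnpool

# True on a supported Rockchip SoC (rk3566/rk3568/rk3588), False otherwise
print(rknn.rknnpool.is_available)
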
View File

@@ -5,6 +5,20 @@ from queue import Queue
from rknnlite.api import RKNNLite
from concurrent.futures import ThreadPoolExecutor, as_completed
supported_socs = ["rk3566","rk3568","rk3588"]
with open('/proc/device-tree/compatible') as f:
# Keep in mind that this is not in container by default.
# and this way of checking can't check if the rknpu driver is running or not.
device_compatible_str = f.read()
for soc in supported_socs:
if soc in device_compatible_str:
is_available = True
break
else:
is_available = False
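Since /proc/device-tree/compatible may be absent (for example inside a container, as the comment above notes), a minimal sketch of a more defensive variant that treats a missing or unreadable file as "not available" instead of raising at import time:

try:
    with open('/proc/device-tree/compatible') as f:
        device_compatible_str = f.read()
    # Available only if the device tree reports a supported Rockchip SoC.
    is_available = any(soc in device_compatible_str for soc in supported_socs)
except OSError:
    # File missing or unreadable: assume no RKNN support.
    is_available = False
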
def initRKNN(rknnModel="./rknnModel/yolov5s.rknn", id=0):
    rknn_lite = RKNNLite()