diff --git a/machine-learning/app/models/base.py b/machine-learning/app/models/base.py
index 5d69b12465..7382a74cdd 100644
--- a/machine-learning/app/models/base.py
+++ b/machine-learning/app/models/base.py
@@ -11,6 +11,8 @@ import ann.ann
 from app.sessions.ort import OrtSession
 from app.sessions.rknn import RknnSession
 
+import rknn.rknnpool
+
 from ..config import clean_name, log, settings
 from ..schemas import ModelFormat, ModelIdentity, ModelSession, ModelTask, ModelType
 from ..sessions.ann import AnnSession
@@ -158,7 +160,7 @@ class InferenceModel(ABC):
 
     @property
     def _model_format_default(self) -> ModelFormat:
-        if settings.rknn:
+        if rknn.rknnpool.is_available and settings.rknn:
             return ModelFormat.RKNN
         elif ann.ann.is_available and settings.ann :
             return ModelFormat.ARMNN
diff --git a/machine-learning/rknn/rknnpool.py b/machine-learning/rknn/rknnpool.py
index 787cbb22cb..7963139760 100644
--- a/machine-learning/rknn/rknnpool.py
+++ b/machine-learning/rknn/rknnpool.py
@@ -5,6 +5,18 @@ from queue import Queue
 from rknnlite.api import RKNNLite
 from concurrent.futures import ThreadPoolExecutor, as_completed
 
+supported_socs = ["rk3566", "rk3568", "rk3588"]
+
+# Detect whether we are running on a supported Rockchip SoC by probing the
+# device tree. NOTE: /proc/device-tree/compatible is not mounted inside
+# containers by default, and this check cannot tell whether the rknpu
+# driver is actually loaded — it only identifies the SoC.
+try:
+    with open('/proc/device-tree/compatible') as f:
+        device_compatible_str = f.read()
+    is_available = any(soc in device_compatible_str for soc in supported_socs)
+except OSError:
+    # Non-device-tree host (x86, container without the bind mount): importing
+    # this module must not crash — just report the NPU as unavailable.
+    is_available = False
+
 def initRKNN(rknnModel="./rknnModel/yolov5s.rknn", id=0):
     rknn_lite = RKNNLite()