From d5e453a77389c36237d11f83fef5bc521140f234 Mon Sep 17 00:00:00 2001
From: yoni13
Date: Sat, 11 Jan 2025 16:26:17 +0800
Subject: [PATCH] ruff format

---
 machine-learning/app/models/base.py   |  2 +-
 machine-learning/app/sessions/rknn.py | 18 ++++++------------
 machine-learning/rknn/rknnpool.py     | 12 ++++++------
 3 files changed, 13 insertions(+), 19 deletions(-)

diff --git a/machine-learning/app/models/base.py b/machine-learning/app/models/base.py
index 7382a74cdd..466dbaa767 100644
--- a/machine-learning/app/models/base.py
+++ b/machine-learning/app/models/base.py
@@ -162,7 +162,7 @@ class InferenceModel(ABC):
     def _model_format_default(self) -> ModelFormat:
         if rknn.rknnpool.is_available and settings.rknn:
             return ModelFormat.RKNN
-        elif ann.ann.is_available and settings.ann :
+        elif ann.ann.is_available and settings.ann:
             return ModelFormat.ARMNN
         else:
             return ModelFormat.ONNX
diff --git a/machine-learning/app/sessions/rknn.py b/machine-learning/app/sessions/rknn.py
index 28c29cc33f..c132747032 100644
--- a/machine-learning/app/sessions/rknn.py
+++ b/machine-learning/app/sessions/rknn.py
@@ -12,24 +12,21 @@ from app.schemas import SessionNode
 from ..config import log
 
+
 def runInfrence(rknn_lite, input):
-    outputs = rknn_lite.inference(inputs=[input], data_format='nchw')
+    outputs = rknn_lite.inference(inputs=[input], data_format="nchw")
     return outputs
 
+
 class RknnSession:
     def __init__(self, model_path: Path | str):
-
         self.model_path = Path(model_path)
         self.ort_model_path = str(self.model_path).replace(".rknn", ".onnx")
 
-        self.tpe = 1 if 'textual' in str(self.model_path) else 2
+        self.tpe = 1 if "textual" in str(self.model_path) else 2
 
         log.info(f"Loading RKNN model from {self.model_path} with {self.tpe} threads.")
 
-        self.rknnpool = rknnPoolExecutor(
-            rknnModel=self.model_path.as_posix(),
-            TPEs= self.tpe,
-            func=runInfrence)
-
+        self.rknnpool = rknnPoolExecutor(rknnModel=self.model_path.as_posix(), TPEs=self.tpe, func=runInfrence)
 
         self.ort_session = ort.InferenceSession(
             self.ort_model_path,
@@ -39,7 +36,6 @@ class RknnSession:
 
         del self.ort_session
 
-
     def __del__(self):
         self.rknnpool.release()
 
@@ -53,11 +49,9 @@ class RknnSession:
         self,
         output_names: list[str] | None,
         input_feed: dict[str, NDArray[np.float32]] | dict[str, NDArray[np.int32]],
-        run_options: Any = None, 
+        run_options: Any = None,
     ):
-
         input_data = [np.ascontiguousarray(v) for v in input_feed.values()][0]
         self.rknnpool.put(input_data)
         outputs = self.rknnpool.get()[0]
         return outputs
-
diff --git a/machine-learning/rknn/rknnpool.py b/machine-learning/rknn/rknnpool.py
index d73a7728c7..f729bfab67 100644
--- a/machine-learning/rknn/rknnpool.py
+++ b/machine-learning/rknn/rknnpool.py
@@ -4,11 +4,12 @@
 from queue import Queue
 from concurrent.futures import ThreadPoolExecutor, as_completed
 
-supported_socs = ["rk3566","rk3568","rk3588"]
+supported_socs = ["rk3566", "rk3568", "rk3588"]
 
 try:
     from rknnlite.api import RKNNLite
-    with open('/proc/device-tree/compatible') as f:
+
+    with open("/proc/device-tree/compatible") as f:
         # Keep in mind that this is not in container by default.
         # and this way of checking can't check if the rknpu driver is running or not.
         device_compatible_str = f.read()
@@ -43,7 +44,7 @@ def initRKNNs(rknnModel="./rknnModel/yolov5s.rknn", TPEs=1):
     return rknn_list
 
 
-class rknnPoolExecutor():
+class rknnPoolExecutor:
     def __init__(self, rknnModel, TPEs, func):
         self.TPEs = TPEs
         self.queue = Queue()
@@ -53,8 +54,7 @@ class rknnPoolExecutor():
         self.num = 0
 
     def put(self, frame):
-        self.queue.put(self.pool.submit(
-            self.func, self.rknnPool[self.num % self.TPEs], frame))
+        self.queue.put(self.pool.submit(self.func, self.rknnPool[self.num % self.TPEs], frame))
         self.num += 1
 
    def get(self):
@@ -66,4 +66,4 @@
     def release(self):
         self.pool.shutdown()
         for rknn_lite in self.rknnPool:
-            rknn_lite.release()
\ No newline at end of file
+            rknn_lite.release()
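
Reviewer note, not part of the patch: the hunks above only reformat rknnpool.py, so for context here is a minimal, self-contained sketch of the pattern rknnPoolExecutor uses: a ThreadPoolExecutor plus a FIFO Queue of futures, with submissions round-robined across TPEs runtime instances so get() hands results back in submission order. StubRuntime, run_inference and PoolSketch are hypothetical stand-ins for the RKNNLite wiring (no NPU or rknnlite install needed to run it); the real class follows the same put/get/release shape shown in the diff.

# Sketch of the round-robin future-queue pattern from rknnpool.py (stub runtime, hypothetical names).
from concurrent.futures import ThreadPoolExecutor
from queue import Queue


class StubRuntime:
    """Hypothetical stand-in for one RKNNLite instance bound to one NPU core."""

    def __init__(self, core_id: int) -> None:
        self.core_id = core_id

    def inference(self, inputs: list) -> list:
        # The real code calls RKNNLite.inference(inputs=[...], data_format="nchw").
        return [f"core{self.core_id}:{x}" for x in inputs]


def run_inference(runtime: StubRuntime, frame) -> list:
    # Plays the role of the func passed to rknnPoolExecutor (runInfrence in the diff).
    return runtime.inference([frame])


class PoolSketch:
    def __init__(self, tpes: int) -> None:
        self.tpes = tpes
        self.queue: Queue = Queue()  # futures kept in submission order
        self.pool = ThreadPoolExecutor(max_workers=tpes)
        self.runtimes = [StubRuntime(i) for i in range(tpes)]
        self.num = 0  # round-robin counter

    def put(self, frame) -> None:
        # Dispatch to the next runtime in round-robin order and remember the future.
        self.queue.put(self.pool.submit(run_inference, self.runtimes[self.num % self.tpes], frame))
        self.num += 1

    def get(self):
        # Block on the oldest outstanding future, so callers see results in submission order.
        return None if self.queue.empty() else self.queue.get().result()

    def release(self) -> None:
        self.pool.shutdown()


if __name__ == "__main__":
    pool = PoolSketch(tpes=2)
    for frame in ("a", "b", "c"):
        pool.put(frame)
    print([pool.get() for _ in range(3)])  # ['core0:a', 'core1:b', 'core0:c']
    pool.release()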