 5 |  5 |  import numpy as np
 6 |  6 |  from PIL import Image
 7 |  7 |
 8 |    | -from .utils import InputType, LoadImage, OrtInferSession, ResizePad
   |  8 | +from .utils import InputType, LoadImage, OrtInferSession
 9 |  9 |
10 | 10 |  cur_dir = Path(__file__).resolve().parent
11 | 11 |  q_cls_model_path = cur_dir / "models" / "table_cls.onnx"
12 | 12 |  yolo_cls_model_path = cur_dir / "models" / "yolo_cls.onnx"
   | 13 | +yolo_cls_x_model_path = cur_dir / "models" / "yolo_cls_x.onnx"
13 | 14 |
14 | 15 |
15 | 16 |  class TableCls:
16 | 17 |      def __init__(self, model_type="yolo", model_path=yolo_cls_model_path):
17 | 18 |          if model_type == "yolo":
18 | 19 |              self.table_engine = YoloCls(model_path)
   | 20 | +        elif model_type == "yolox":
   | 21 | +            self.table_engine = YoloCls(yolo_cls_x_model_path)
19 | 22 |          else:
20 | 23 |              model_path = q_cls_model_path
21 | 24 |              self.table_engine = QanythingCls(model_path)
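The constructor now accepts a third model_type value: "yolox" selects the new yolo_cls_x.onnx weights, "yolo" keeps the existing default, and any other value falls back to the QAnything classifier. A minimal usage sketch (the import path is an assumption, not shown in this diff):

    from table_cls import TableCls  # hypothetical import path for this package

    engine_default = TableCls()                 # yolo_cls.onnx, as before
    engine_x = TableCls(model_type="yolox")     # new: yolo_cls_x.onnx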
@@ -66,8 +69,9 @@ def __init__(self, model_path):
66 | 69 |          self.cls = {0: "wireless", 1: "wired"}
67 | 70 |
68 | 71 |      def preprocess(self, img):
69 |    | -        img, *_ = ResizePad(img, 640)
70 |    | -        img = np.array(img, dtype=np.float32) / 255.0
   | 72 | +        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
   | 73 | +        img = cv2.resize(img, (640, 640))
   | 74 | +        img = np.array(img, dtype=np.float32) / 255
71 | 75 |          img = img.transpose(2, 0, 1)  # HWC to CHW
72 | 76 |          img = np.expand_dims(img, axis=0)  # Add batch dimension, only one image
73 | 77 |          return img
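The rewritten preprocess drops the ResizePad letterboxing and instead converts BGR to RGB, resizes directly to 640x640 (so the aspect ratio is no longer preserved), scales pixels to [0, 1], reorders to CHW, and adds a batch dimension. A self-contained sketch of the equivalent pipeline, assuming cv2 is already imported elsewhere in the module (the import is not part of this hunk) and using a hypothetical standalone function name:

    import cv2
    import numpy as np

    def preprocess_for_cls(img_bgr: np.ndarray) -> np.ndarray:
        # Hypothetical standalone equivalent of the new preprocess method
        img = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)  # BGR -> RGB
        img = cv2.resize(img, (640, 640))               # fixed size, no padding
        img = img.astype(np.float32) / 255              # scale to [0, 1]
        img = img.transpose(2, 0, 1)                    # HWC -> CHW
        return np.expand_dims(img, axis=0)              # shape (1, 3, 640, 640)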
|