Rename onnx_eval.py to eval_onnx.py (#3)
Rename onnx_eval.py to eval_onnx.py (a95e32e79efa7849727de3cb3c62527715a7286d)
Co-authored-by: Xiaodong Wang <[email protected]>
onnx_eval.py → eval_onnx.py
RENAMED
@@ -87,7 +87,7 @@ def run(data,
         exist_ok=False, # existing project/name ok, do not increment
         half=True, # use FP16 half-precision inference
         plots=False,
-
+        onnx_model="./yolov5s.onnx",
         ipu=False,
         provider_config='',
         ):
@@ -99,14 +99,14 @@ def run(data,
     (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True)  # make dir
 
     # Load model
-    if isinstance(
-
+    if isinstance(onnx_model, list):
+        onnx_model = onnx_model[0]
     if ipu:
         providers = ["VitisAIExecutionProvider"]
         provider_options = [{"config_file": provider_config}]
-        onnx_model = onnxruntime.InferenceSession(
+        onnx_model = onnxruntime.InferenceSession(onnx_model, providers=providers, provider_options=provider_options)
     else:
-        onnx_model = onnxruntime.InferenceSession(
+        onnx_model = onnxruntime.InferenceSession(onnx_model)
 
     # Data
     data = check_dataset(data)  # check
@@ -255,7 +255,7 @@ def parse_opt():
     parser.add_argument('--name', default='exp', help='save to project/name')
     parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
     parser.add_argument('--half', action='store_true', help='use FP16 half-precision inference')
-    parser.add_argument('
+    parser.add_argument('--onnx_model', default='./yolov5s.onnx', nargs='+', type=str, help='path to your onnx_weights')
     parser.add_argument('--ipu', action='store_true', help='flag for ryzen ai')
     parser.add_argument('--provider_config', default='', type=str, help='provider config for ryzen ai')
     opt = parser.parse_args()
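For context, the added code path amounts to choosing an ONNX Runtime execution provider at session-creation time: VitisAIExecutionProvider (with a config file) when --ipu is set, otherwise the default providers. The sketch below is a minimal, self-contained illustration of that logic, not the script itself; the model path, the zero-filled dummy input, and the float32 dtype are assumptions for demonstration only.

import numpy as np
import onnxruntime

def load_session(onnx_model="./yolov5s.onnx", ipu=False, provider_config=""):
    """Create an InferenceSession the same way the diff in eval_onnx.py does."""
    if isinstance(onnx_model, list):  # argparse with nargs='+' yields a list of paths
        onnx_model = onnx_model[0]
    if ipu:
        # Vitis AI execution provider for Ryzen AI, configured via a JSON config file
        providers = ["VitisAIExecutionProvider"]
        provider_options = [{"config_file": provider_config}]
        return onnxruntime.InferenceSession(onnx_model,
                                            providers=providers,
                                            provider_options=provider_options)
    # Default (CPU) providers when the IPU flag is not set
    return onnxruntime.InferenceSession(onnx_model)

if __name__ == "__main__":
    session = load_session()  # pass ipu=True and a provider_config path for Ryzen AI
    inp = session.get_inputs()[0]
    # Replace any dynamic dimensions with 1 and feed a zero tensor (float32 assumed)
    shape = [d if isinstance(d, int) else 1 for d in inp.shape]
    outputs = session.run(None, {inp.name: np.zeros(shape, dtype=np.float32)})
    print([o.shape for o in outputs])

In the renamed script the same behaviour is driven from the command line via the new --onnx_model flag together with the existing --ipu and --provider_config flags.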