@@ -121,11 +121,14 @@ def run(self, *args, **kwargs):
             f.write(np.ndarray([0]))
         with open(os.path.join(self._output_path, "openvino.xml"), "wb") as f:
             f.write(np.ndarray([0]))
+        with open(os.path.join(self._output_path, "model.onnx"), "wb") as f:
+            f.write(np.ndarray([0]))
 
         return {
             "outputs": {
                 "bin": os.path.join(self._output_path, "openvino.bin"),
                 "xml": os.path.join(self._output_path, "openvino.xml"),
+                "onnx": os.path.join(self._output_path, "model.onnx"),
             }
         }
 
@@ -267,26 +270,27 @@ def test_infer(self, mocker) -> None:
         assert output.get_annotations()[-1].get_labels()[0].probability == 0.7
 
     @e2e_pytest_unit
-    def test_evaluate(self) -> None:
-        """Test evaluate function.
+    def test_det_evaluate(self) -> None:
+        """Test evaluate function for detection."""
 
-        <Steps>
-        1. Create model entity
-        2. Create result set entity
-        3. Run evaluate function with same dataset, this should give 100% accuracy
-        4. Run evaluate function with empty dataset, this should give 0% accuracy
-        5. Do 1 - 4 for action detection
-        """
         _config = ModelConfiguration(DetectionConfig(), self.det_label_schema)
         _model = ModelEntity(self.det_dataset, _config)
         resultset = ResultSetEntity(_model, self.det_dataset, self.det_dataset)
         self.det_task.evaluate(resultset)
         assert resultset.performance.score.value == 1.0
 
+    @e2e_pytest_unit
+    def test_det_evaluate_with_empty_annotations(self) -> None:
+        """Test evaluate function for detection with empty predictions."""
+
         resultset = ResultSetEntity(_model, self.det_dataset, self.det_dataset.with_empty_annotations())
         self.det_task.evaluate(resultset)
         assert resultset.performance.score.value == 0.0
 
+    @e2e_pytest_unit
+    def test_iseg_evaluate(self) -> None:
+        """Test evaluate function for instance segmentation."""
+
         _config = ModelConfiguration(DetectionConfig(), self.iseg_label_schema)
         _model = ModelEntity(self.iseg_dataset, _config)
         resultset = ResultSetEntity(_model, self.iseg_dataset, self.iseg_dataset)
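In the hunk above, the new test_det_evaluate_with_empty_annotations still references _model, which in the old code was a local variable of test_evaluate. A minimal, hypothetical self-contained variant of that test (a sketch, not part of the diff, assuming the same self.det_dataset, self.det_label_schema, and self.det_task fixtures used elsewhere in this class) could look like:

    @e2e_pytest_unit
    def test_det_evaluate_with_empty_annotations(self) -> None:
        """Test evaluate function for detection with empty predictions."""

        # Build the model entity locally instead of relying on state from
        # test_det_evaluate (names taken from the hunk above).
        _config = ModelConfiguration(DetectionConfig(), self.det_label_schema)
        _model = ModelEntity(self.det_dataset, _config)
        # Ground truth is the full dataset, predictions are emptied,
        # so the evaluated score should drop to 0.0.
        resultset = ResultSetEntity(_model, self.det_dataset, self.det_dataset.with_empty_annotations())
        self.det_task.evaluate(resultset)
        assert resultset.performance.score.value == 0.0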
@@ -315,10 +319,6 @@ def test_export(self, mocker, precision: ModelPrecision) -> None:
             return_value=True,
         )
 
-        with open(os.path.join(self.det_task._output_path, "openvino.xml"), "wb") as f:
-            f.write(np.ndarray([0]))
-        with open(os.path.join(self.det_task._output_path, "openvino.bin"), "wb") as f:
-            f.write(np.ndarray([0]))
         self.det_task.export(ExportType.OPENVINO, _model, precision, False)
 
         assert _model.model_format == ModelFormat.OPENVINO