Skip to content

Commit

Permalink
[ONNX] Reduced memory consumption while running tests (#23628)
Browse files Browse the repository at this point in the history
### Details:
 - Significantly reduced RAM usage while running tests
- May introduce a test regression in the multi-worker scenario (-n auto), but
none was detected during validation

### Tickets:
 - 129958
  • Loading branch information
gkrivor authored Mar 25, 2024
1 parent cda5a02 commit 8c82e7c
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions src/frontends/onnx/tests/tests_python/utils/model_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,14 +141,15 @@ def _execute_pb_data(
executed_tests = executed_tests + 1
return executed_tests


def _add_model_import_test(self, model_test: ExtOnnxTestCase) -> None:
# The model is loaded at runtime; note that it may never be
# loaded at all if the test is skipped
model_marker = [None] # type: List[Optional[Union[ModelProto, NodeProto]]]

def run_import(test_self: Any, device: Text) -> None:
model = ModelImportRunner._load_onnx_model(model_test.model_dir, model_test.model)
model_marker[0] = model
model_marker[0] = model_test.model_dir / model_test.model
assert import_onnx_model(model)

self._add_test("ModelImport", model_test.name, run_import, model_marker)
Expand All @@ -160,7 +161,7 @@ def _add_model_execution_test(self, model_test: ExtOnnxTestCase) -> None:

def run_execution(test_self: Any, device: Text) -> None:
model = ModelImportRunner._load_onnx_model(model_test.model_dir, model_test.model)
model_marker[0] = model
model_marker[0] = model_test.model_dir / model_test.model
prepared_model = self.backend.prepare(model, device)
assert prepared_model is not None
executed_tests = ModelImportRunner._execute_npz_data(
Expand Down

0 comments on commit 8c82e7c

Please sign in to comment.