From f53e1aa30cfb39e4ee04a978ef1f40f4908c2a7c Mon Sep 17 00:00:00 2001
From: "B. Gawrych"
Date: Tue, 29 Jun 2021 09:49:43 +0200
Subject: [PATCH 1/2] Switch hybrid_forward to forward in test_fc_int8_fp32_outputs

---
 tests/python/mkl/subgraphs/test_fc_subgraph.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/python/mkl/subgraphs/test_fc_subgraph.py b/tests/python/mkl/subgraphs/test_fc_subgraph.py
index 6351bfe0bbeb..cdead7582f36 100644
--- a/tests/python/mkl/subgraphs/test_fc_subgraph.py
+++ b/tests/python/mkl/subgraphs/test_fc_subgraph.py
@@ -188,10 +188,10 @@ def __init__(self, **kwargs):
       self.dense0 = nn.Dense(64)
       self.dense1 = nn.Dense(64)
 
-    def hybrid_forward(self, F, x):
+    def forward(self, x):
       x = self.dense0(x)
-      y = self.dense1(x)   # quantizable
-      z = F.softmax(x)     # non quantizable
+      y = self.dense1(x)      # quantizable
+      z = mx.npx.softmax(x)   # non quantizable
       return y + z
 
   attrs = {'fc': {}}

From c1eab47be4d35ec6f689246838a12bd1c39a2f24 Mon Sep 17 00:00:00 2001
From: "B. Gawrych"
Date: Tue, 29 Jun 2021 12:50:04 +0200
Subject: [PATCH 2/2] Add use_np decorator

---
 tests/python/mkl/subgraphs/test_fc_subgraph.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/tests/python/mkl/subgraphs/test_fc_subgraph.py b/tests/python/mkl/subgraphs/test_fc_subgraph.py
index cdead7582f36..9f50a4756f5e 100644
--- a/tests/python/mkl/subgraphs/test_fc_subgraph.py
+++ b/tests/python/mkl/subgraphs/test_fc_subgraph.py
@@ -175,8 +175,10 @@ def infer_shape(self, x, *args):
                  rtol=1e-2, atol=1e-2, etol=0.01)
 
 
+@mx.util.use_np
 @pytest.mark.parametrize('data_shape', DATA_SHAPE)
-def test_fc_int8_and_fp32_outputs(data_shape):
+@pytest.mark.parametrize('flatten', [True, False])
+def test_fc_int8_and_fp32_outputs(data_shape, flatten):
   #                 /---> Quantizable op
   # Input ---> FC -|
   #                 \---> Non quantizable op
@@ -185,8 +187,8 @@ def test_fc_int8_and_fp32_outputs(data_shape):
   class MultiOutputFC(nn.HybridBlock):
     def __init__(self, **kwargs):
       super(MultiOutputFC, self).__init__(**kwargs)
-      self.dense0 = nn.Dense(64)
-      self.dense1 = nn.Dense(64)
+      self.dense0 = nn.Dense(64, flatten=flatten)
+      self.dense1 = nn.Dense(64, flatten=flatten)
 
     def forward(self, x):
       x = self.dense0(x)
@@ -196,4 +198,4 @@ def forward(self, x):
 
   attrs = {'fc': {}}
   net = MultiOutputFC()
-  check_fusion(net, data_shape, attrs, check_quantization=True)
+  check_fusion(net, data_shape, attrs, check_quantization=flatten)
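
Note on the migration shown in these patches: MXNet 2.x Gluon blocks implement forward(self, x) on array arguments instead of the deprecated hybrid_forward(self, F, x), operators are called through the mx.np / mx.npx namespaces rather than the F handle, and the test is wrapped in @mx.util.use_np so that inputs and parameters use numpy-compatible arrays. Below is a minimal, self-contained sketch of that pattern, reusing the MultiOutputFC block from the patches; the run_multi_output_fc wrapper, the initialization calls, and the random input shape are illustrative assumptions only, and the fusion/quantization harness (check_fusion) from the test file is intentionally omitted.

import mxnet as mx
from mxnet.gluon import nn

@mx.util.use_np                          # numpy-mode semantics, as added in PATCH 2/2
def run_multi_output_fc(flatten=True):
    class MultiOutputFC(nn.HybridBlock):
        def __init__(self, **kwargs):
            super(MultiOutputFC, self).__init__(**kwargs)
            self.dense0 = nn.Dense(64, flatten=flatten)
            self.dense1 = nn.Dense(64, flatten=flatten)

        def forward(self, x):            # replaces hybrid_forward(self, F, x)
            x = self.dense0(x)
            y = self.dense1(x)           # quantizable branch (FullyConnected)
            z = mx.npx.softmax(x)        # non-quantizable branch (was F.softmax)
            return y + z

    net = MultiOutputFC()
    net.initialize()
    net.hybridize()                      # build the cached graph; the actual fusion and
                                         # quantization checks are done by check_fusion
    data = mx.np.random.uniform(size=(4, 10, 64))
    return net(data)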