Fix get_default_qat_qconfig for PT 1.13 (pytorch#88876)
peterjc123 authored and kulinseth committed Dec 9, 2022
1 parent a6a9f34 commit 7524206
Showing 2 changed files with 33 additions and 1 deletion.
32 changes: 32 additions & 0 deletions test/quantization/core/test_top_level_apis.py
@@ -59,3 +59,35 @@ def test_fake_quants(self) -> None:
         for observer in self.fake_quants:
             obs = self._get_observer_ins(observer)
             obs.forward(t)
+
+
+class TestQConfig(TestCase):
+
+    REDUCE_RANGE_DICT = {
+        'fbgemm': (True, False),
+        'qnnpack': (False, False),
+        'onednn': (False, False),
+        'x86': (True, False),
+    }
+
+    def test_reduce_range_qat(self) -> None:
+        for backend, reduce_ranges in self.REDUCE_RANGE_DICT.items():
+            for version in range(2):
+                qconfig = torch.ao.quantization.get_default_qat_qconfig(backend, version)
+
+                fake_quantize_activ = qconfig.activation()
+                self.assertEqual(fake_quantize_activ.activation_post_process.reduce_range, reduce_ranges[0])
+
+                fake_quantize_weight = qconfig.weight()
+                self.assertEqual(fake_quantize_weight.activation_post_process.reduce_range, reduce_ranges[1])
+
+    def test_reduce_range(self) -> None:
+        for backend, reduce_ranges in self.REDUCE_RANGE_DICT.items():
+            for version in range(1):
+                qconfig = torch.ao.quantization.get_default_qconfig(backend, version)
+
+                fake_quantize_activ = qconfig.activation()
+                self.assertEqual(fake_quantize_activ.reduce_range, reduce_ranges[0])
+
+                fake_quantize_weight = qconfig.weight()
+                self.assertEqual(fake_quantize_weight.reduce_range, reduce_ranges[1])
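The new tests exercise the public API directly. A minimal standalone check in the same spirit, for just the fbgemm backend (a sketch, assuming a build where that backend is available; expected values follow REDUCE_RANGE_DICT above):

import torch

# Default QAT qconfig for fbgemm; per REDUCE_RANGE_DICT above, its
# activation observer should report reduce_range=True and its weight
# observer reduce_range=False.
qconfig = torch.ao.quantization.get_default_qat_qconfig('fbgemm', 1)

# qconfig.activation() builds a FakeQuantize module; the underlying
# observer (activation_post_process) carries the reduce_range flag
# that the tests above assert on.
assert qconfig.activation().activation_post_process.reduce_range
assert not qconfig.weight().activation_post_process.reduce_range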
2 changes: 1 addition & 1 deletion torch/ao/quantization/qconfig.py
@@ -339,7 +339,7 @@ def get_default_qat_qconfig(backend='fbgemm', version=1):
                                                                 quant_min=0,
                                                                 quant_max=255),
                               weight=default_per_channel_weight_fake_quant)
-        if backend == 'x86':
+        elif backend == 'x86':
             qconfig = QConfig(activation=FakeQuantize.with_args(observer=MovingAverageMinMaxObserver,
                                                                 quant_min=0,
                                                                 quant_max=255,
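The one-token change matters because a bare `if` terminates the backend `elif` chain. Assuming the chain continues below the visible context with a trailing `else` fallback (an assumption here; those lines fall outside this hunk), that `else` would bind to the `if backend == 'x86'` test and overwrite the qconfig already chosen for every other backend. A hypothetical reduction of that failure mode (illustrative names, not the literal qconfig.py source):

def pick_qconfig(backend):
    if backend == 'fbgemm':
        qconfig = 'fbgemm-qat'
    elif backend == 'onednn':
        qconfig = 'onednn-qat'
    if backend == 'x86':         # bug: bare `if` starts a new chain,
        qconfig = 'x86-qat'
    else:                        # so this fallback fires for fbgemm and
        qconfig = 'default'      # onednn too, clobbering their qconfig
    return qconfig

print(pick_qconfig('fbgemm'))    # 'default' with `if`; 'fbgemm-qat' once `elif` is used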