
Commit

Update ONNX to 1.5.0 (#386)
* Bump up ONNX version to 1.5.0

* Xfail failing backend tests.

* Refactor the old Slice unit test to use Slice from opset 1.

* Update model zoo tests.

* Xfail YOLOv3 and skip BiDAF (big models).
Adam Rogowiec authored and postrational committed Jun 6, 2019
1 parent 2f2fbb4 commit 9111b29
Showing 5 changed files with 143 additions and 37 deletions.
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,3 +1,3 @@
numpy==1.16.3
onnx==1.4.1
onnx==1.5.0
cachetools==3.1.0
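
For context, not as part of this change: ONNX 1.5.0 ships ai.onnx opset 10, which is what the new Slice, Resize, and NonMaxSuppression tests below assume. A quick, illustrative sanity check that the environment matches the pin (the test name is made up for this sketch):

import onnx
from onnx import defs

def test_onnx_pin_matches_requirements():
    # The pinned release and the opset it registers (ONNX 1.5.0 -> opset 10).
    assert onnx.__version__ == '1.5.0'
    assert defs.onnx_opset_version() >= 10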
89 changes: 82 additions & 7 deletions tests/test_backend.py
@@ -160,18 +160,93 @@
# Non zero -> NGONNX-472
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonzero_example_cpu)

# ConvInteger NGONNX-410
pytest.mark.xfail(OnnxBackendNodeModelTest.test_basic_convinteger_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_convinteger_with_padding_cpu)

# Quantized NGONNX-595
# Scale / zero point not a scalar
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dequantizelinear_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_qlinearconv_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_qlinearmatmul_2D_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_qlinearmatmul_3D_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_quantizelinear_cpu)

# MatmulInteger NGONNX-410
pytest.mark.xfail(OnnxBackendNodeModelTest.test_matmulinteger_cpu)

# IsInf - NGONNX-528
pytest.mark.xfail(OnnxBackendNodeModelTest.test_isinf_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_isinf_negative_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_isinf_positive_cpu)

# Pooling ops NGONNX-597
pytest.mark.xfail(OnnxBackendNodeModelTest.test_maxpool_2d_ceil_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_maxpool_2d_dilations_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_averagepool_2d_ceil_cpu)

# Modulus - NGONNX-527
pytest.mark.xfail(OnnxBackendNodeModelTest.test_mod_bcast_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_mod_float_mixed_sign_example_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_mod_fmod_mixed_sign_example_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_mod_int64_mixed_sign_example_cpu)

# NonMaxSuppression - NGONNX-526
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_center_point_box_format_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_flipped_coordinates_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_identical_boxes_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_limit_output_size_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_single_box_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_suppress_by_IOU_and_scores_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_suppress_by_IOU_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_two_batches_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonmaxsuppression_two_classes_cpu)

# Resize NGONNX-598
pytest.mark.xfail(OnnxBackendNodeModelTest.test_resize_downsample_linear_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_resize_downsample_nearest_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_resize_nearest_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_resize_upsample_linear_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_resize_upsample_nearest_cpu)

# ReverseSequence - NGONNX-525
pytest.mark.xfail(OnnxBackendNodeModelTest.test_reversesequence_batch_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_reversesequence_time_cpu)

# Dynamic Slice NGONNX-522, 599
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_default_axes_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_default_steps_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_end_out_of_bounds_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_neg_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_neg_steps_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_slice_start_out_of_bounds_cpu)

# StrNormalizer NGONNX-600
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_export_monday_casesensintive_lower_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_export_monday_casesensintive_nochangecase_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_export_monday_casesensintive_upper_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_export_monday_empty_output_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_export_monday_insensintive_upper_twodim_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_strnormalizer_nostopwords_nochangecase_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_monday_casesensintive_lower_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_monday_casesensintive_nochangecase_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_monday_casesensintive_upper_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_monday_empty_output_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_monday_insensintive_upper_twodim_cpu)
pytest.mark.xfail(OnnxBackendSimpleModelTest.test_strnorm_model_nostopwords_nochangecase_cpu)

# RoiAlign NGONNX-601
pytest.mark.xfail(OnnxBackendNodeModelTest.test_roialign_cpu)

# NGONNX-521
pytest.mark.xfail(OnnxBackendNodeModelTest.test_top_k_cpu)

# Other tests
pytest.mark.xfail(OnnxBackendNodeModelTest.test_instancenorm_epsilon_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_instancenorm_example_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_upsample_nearest_cpu)

# Dynamic Slice -> EXPERIMENTAL https://github.com/onnx/onnx/blob/master/docs/Operators.md#DynamicSlice
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_default_axes_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_end_out_of_bounds_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_neg_cpu)
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_start_out_of_bounds_cpu)

# Tests which fail on the INTELGPU backend
if selected_backend_name == 'INTELGPU':
pytest.mark.xfail(OnnxBackendNodeModelTest.test_dynamic_slice_start_out_of_bounds_cpu)
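
As a sketch of how the xfail markers above take effect (the backend wiring below is a placeholder, not this file's actual setup code): onnx.backend.test.BackendTest collects the official ONNX node and model tests for a backend, and calling pytest.mark.xfail on a collected test method attaches the marker in place, so a known failure is reported as expected instead of breaking the run.

import pytest
import onnx.backend.test
from onnx.backend.base import Backend

class PlaceholderBackend(Backend):
    """Stand-in for the repository's nGraph backend (hypothetical)."""

# Collect the stock ONNX backend tests against the placeholder backend.
backend_test = onnx.backend.test.BackendTest(PlaceholderBackend, __name__)

# Collected test classes are exposed via test_cases, keyed by class name.
OnnxBackendNodeModelTest = backend_test.test_cases['OnnxBackendNodeModelTest']

# Applying the mark as a call decorates the method in place.
pytest.mark.xfail(OnnxBackendNodeModelTest.test_nonzero_example_cpu)

# Export the (now marked) cases so pytest can discover them.
globals().update(backend_test.enable_report().test_cases)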
39 changes: 16 additions & 23 deletions tests/test_reshape.py
@@ -20,7 +20,7 @@
import onnx
import pytest

from tests.utils import all_arrays_equal, run_node, get_runtime
from tests.utils import all_arrays_equal, run_model, run_node, get_node_model, get_runtime
from onnx.helper import make_node, make_graph, make_tensor_value_info, make_model
from ngraph_onnx.onnx_importer.importer import import_onnx_model

@@ -115,58 +115,51 @@ def test_transpose():
assert np.array_equal(ng_results, [expected_output])


def test_slice():
def test_slice_opset1():
data = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])

expected_output = np.array([[5, 6, 7]])
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'],
axes=[0, 1], starts=[1, 0], ends=[2, 3])
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, axes=[0, 1], starts=[1, 0], ends=[2, 3])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

expected_output = np.array([[2, 3, 4]])
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], starts=[0, 1],
ends=[-1, 1000])
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, starts=[0, 1], ends=[-1, 1000])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], axes=[0, 1],
starts=[0, 0], ends=[3, 10])
data = np.random.randn(20, 10, 5).astype(np.float32)
expected_output = data[0:3, 0:10]
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, axes=[0, 1], starts=[0, 0], ends=[3, 10])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

# default axes
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], starts=[0, 0, 3],
ends=[20, 10, 4])
data = np.random.randn(20, 10, 5).astype(np.float32)
expected_output = data[:, :, 3:4]
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, starts=[0, 0, 3], ends=[20, 10, 4])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

# end out of bounds
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], axes=[1], starts=[1],
ends=[1000])
data = np.random.randn(20, 10, 5).astype(np.float32)
expected_output = data[:, 1:1000]
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, axes=[1], starts=[1], ends=[1000])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

# negative value
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], axes=[1], starts=[0],
ends=[-1])
data = np.random.randn(20, 10, 5).astype(np.float32)
expected_output = data[:, 0:-1]
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, axes=[1], starts=[0], ends=[-1])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])

# start out of bounds
node = onnx.helper.make_node('Slice', inputs=['x'], outputs=['y'], axes=[1], starts=[1000],
ends=[1000])
data = np.random.randn(20, 10, 5).astype(np.float32)
expected_output = data[:, 1000:1000]
ng_results = run_node(node, [data])
model = get_node_model('Slice', data, axes=[1], starts=[1000], ends=[1000])
ng_results = run_model(model, [data])
assert np.array_equal(ng_results, [expected_output])
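
The refactor above keeps these cases on the attribute-style Slice: in opset 10, the default for ONNX 1.5.0, Slice takes starts, ends, axes, and steps as inputs rather than attributes, so the tests now build a model that explicitly imports opset 1 instead of going through run_node. The real get_node_model and run_model helpers live in tests/utils; the sketch below only illustrates the shape such a helper could take and is an assumption, not the repository's implementation.

import numpy as np
from onnx import mapping
from onnx.helper import make_node, make_graph, make_model, make_tensor_value_info

def get_node_model_sketch(op_type, *input_arrays, opset=1, **node_attributes):
    # One value info per input array, mapping numpy dtypes to ONNX tensor types.
    inputs = [
        make_tensor_value_info('x{}'.format(i),
                               mapping.NP_TYPE_TO_TENSOR_TYPE[arr.dtype],
                               arr.shape)
        for i, arr in enumerate(input_arrays)
    ]
    out_type = mapping.NP_TYPE_TO_TENSOR_TYPE[input_arrays[0].dtype]
    output = make_tensor_value_info('y', out_type, None)
    node = make_node(op_type, [vi.name for vi in inputs], ['y'], **node_attributes)
    graph = make_graph([node], 'single_node_graph', inputs, [output])
    model = make_model(graph, producer_name='slice_opset1_sketch')
    # Pin the default domain to the requested opset; opset 1 keeps the
    # attribute form of Slice used in test_slice_opset1.
    model.opset_import[0].version = opset
    return model

run_model would then hand such a model to import_onnx_model and execute it on the selected nGraph backend, mirroring run_node but with explicit control over the opset.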


38 changes: 38 additions & 0 deletions tests/test_zoo_models.py
@@ -34,6 +34,14 @@
'url': _S3_MODEL_ZOO + 'arcface/resnet100/resnet100.tar.gz',
},

# BiDAF
{
'model_name': 'bidaf_opset9',
'atol': 1e-07,
'rtol': 0.001,
'url': _WINDOWS_NET + 'opset_9/bidaf/bidaf.tar.gz',
},

# BVLC AlexNet
{
'model_name': 'bvlc_alexnet_opset3',
@@ -362,6 +370,21 @@
'rtol': 0.001,
'url': _S3_DOWNLOAD_ONNX + 'opset_9/squeezenet.tar.gz',
},
{
'model_name': 'squeezenet1.1_opset7',
'atol': 1e-07,
'rtol': 0.001,
'url': _S3_MODEL_ZOO + 'squeezenet/squeezenet1.1/squeezenet1.1.tar.gz',

},

# SSD
{
'model_name': 'ssd_opset10',
'atol': 1e-07,
'rtol': 0.001,
'url': _WINDOWS_NET + 'opset_10/ssd/ssd.tar.gz',
},

# Tiny-YOLOv2
{
@@ -384,6 +407,14 @@
{'model_name': 'vgg19_opset8', 'url': _S3_DOWNLOAD_ONNX + 'opset_8/vgg19.tar.gz'},
{'model_name': 'vgg19_opset9', 'url': _S3_DOWNLOAD_ONNX + 'opset_9/vgg19.tar.gz'},

# YOLOv3
{
'model_name': 'yolov3_opset10',
'atol': 1e-07,
'rtol': 0.001,
'url': _WINDOWS_NET + 'opset_10/yolov3/yolov3.tar.gz',
},

# ZFNet-512
{
'model_name': 'zfnet512_opset3',
@@ -430,6 +461,13 @@
# ONNX ValidationError
backend_test.exclude('test_mnist_opset1')
backend_test.exclude('test_tiny_yolov2_opset1')
backend_test.exclude('test_yolov3_opset10')

# Use of unsupported domain: ai.onnx.ml
pytest.mark.skip(test_cases.test_bidaf_opset9_cpu)

# Not yet supported
backend_test.exclude('test_ssd_opset10')

# Tests which fail on the INTELGPU backend
if tests.utils.BACKEND_NAME == 'INTELGPU':
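
For reference, the exclude() patterns and the pytest.mark.skip call above address test methods generated from the zoo entries; judging by the names in this diff, each entry's model_name becomes a method named test_<model_name>_<device>. A small sketch of that convention (an assumption, not code from this repository):

def zoo_test_name(model_name, device='cpu'):
    # e.g. 'bidaf_opset9' on CPU -> 'test_bidaf_opset9_cpu'
    return 'test_{}_{}'.format(model_name, device.lower())

assert zoo_test_name('yolov3_opset10') == 'test_yolov3_opset10_cpu'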
12 changes: 6 additions & 6 deletions tests/utils/model_zoo_tester.py
@@ -171,9 +171,9 @@ def run(test_self, device): # type: (Any, Text) -> None
inputs = list(test_data['inputs'])
outputs = list(prepared_model.run(inputs))
ref_outputs = test_data['outputs']
self._assert_similar_outputs(ref_outputs, outputs,
rtol=model_test.rtol,
atol=model_test.atol)
self.assert_similar_outputs(ref_outputs, outputs,
rtol=model_test.rtol,
atol=model_test.atol)

for test_data_dir in glob.glob(
os.path.join(model_dir, 'test_data_set*')):
@@ -194,8 +194,8 @@ def run(test_self, device): # type: (Any, Text) -> None
tensor.ParseFromString(f.read())
ref_outputs.append(numpy_helper.to_array(tensor))
outputs = list(prepared_model.run(inputs))
self._assert_similar_outputs(ref_outputs, outputs,
rtol=model_test.rtol,
atol=model_test.atol)
self.assert_similar_outputs(ref_outputs, outputs,
rtol=model_test.rtol,
atol=model_test.atol)

self._add_test(kind + 'Model', model_test.name, run, model_marker)
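
The rename from _assert_similar_outputs to assert_similar_outputs matches the name this runner is expected to expose under onnx 1.5.0. A minimal sketch of the comparison involved (an assumption about its behaviour, not the upstream implementation): each reference output must match the backend output within the per-model rtol/atol values defined in tests/test_zoo_models.py.

import numpy as np

def assert_similar_outputs_sketch(ref_outputs, outputs, rtol, atol):
    # Pairwise element-wise comparison with both tolerances, in the spirit of
    # numpy.testing.assert_allclose.
    assert len(ref_outputs) == len(outputs)
    for reference, actual in zip(ref_outputs, outputs):
        np.testing.assert_allclose(reference, actual, rtol=rtol, atol=atol)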
