This is an automated email from the ASF dual-hosted git repository.

masahi pushed a commit to branch ci-docker-staging
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 16a31ea78cf0fb875ae93c5cc2e6cc298141e661
Author: Masahiro Masuda <masahi...@gmail.com>
AuthorDate: Mon Mar 20 14:18:43 2023 +0900

    fix TF tests
---
 tests/python/frontend/tensorflow/test_forward.py | 6 ++++--
 tests/scripts/request_hook/request_hook.py       | 1 +
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/tests/python/frontend/tensorflow/test_forward.py 
b/tests/python/frontend/tensorflow/test_forward.py
index 1ca0f3faef..a614d05ffc 100644
--- a/tests/python/frontend/tensorflow/test_forward.py
+++ b/tests/python/frontend/tensorflow/test_forward.py
@@ -249,6 +249,8 @@ def compare_tf_with_tvm(
     targets=None,
     ignore_in_shape=False,
     convert_config=None,
+    atol=1e-5,
+    rtol=1e-5,
 ):
     """Generic function to generate and compare tensorflow and TVM output"""
 
@@ -303,7 +305,7 @@ def compare_tf_with_tvm(
             for i, tf_out in enumerate(tf_output):
                 if not isinstance(tf_out, np.ndarray):
                     assert len(tvm_output[i].shape) == 0  # pylint: 
disable=len-as-condition
-                tvm.testing.assert_allclose(tf_out, tvm_output[i], atol=1e-5, 
rtol=1e-5)
+                tvm.testing.assert_allclose(tf_out, tvm_output[i], atol=atol, 
rtol=rtol)
 
         sess.close()
 
@@ -3415,7 +3417,7 @@ def _test_forward_crop_and_resize(
             extrapolation_value=extrapolation_value,
             name="crop_and_resize",
         )
-        compare_tf_with_tvm([image], ["in_data:0"], "crop_and_resize:0")
+        compare_tf_with_tvm([image], ["in_data:0"], "crop_and_resize:0", 
atol=1e-4, rtol=1e-4)
 
 
 def test_forward_crop_and_resize():
diff --git a/tests/scripts/request_hook/request_hook.py 
b/tests/scripts/request_hook/request_hook.py
index e1b6cc7d59..323443f129 100644
--- a/tests/scripts/request_hook/request_hook.py
+++ b/tests/scripts/request_hook/request_hook.py
@@ -109,6 +109,7 @@ URL_MAP = {
     
"https://github.com/dmlc/web-data/raw/main/gluoncv/detection/street_small.jpg": 
f"{BASE}/2022-10-05/gluon-small-stree.jpg",
     
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/Custom/placeholder.pb":
 f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/Custom/placeholder.pb",
     
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/classify_image_graph_def-with_shapes.pb":
 
f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/classify_image_graph_def-with_shapes.pb",
+    
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/ResnetV2/resnet-20180601_resnet_v2_imagenet-shapes.pb":
 
f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/ResnetV2/resnet-20180601_resnet_v2_imagenet-shapes.pb",
     
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/elephant-299.jpg":
 
f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/elephant-299.jpg",
     
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/imagenet_2012_challenge_label_map_proto.pbtxt":
 
f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/imagenet_2012_challenge_label_map_proto.pbtxt",
     
"https://github.com/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/imagenet_synset_to_human_label_map.txt":
 
f"{BASE}/dmlc/web-data/raw/main/tensorflow/models/InceptionV1/imagenet_synset_to_human_label_map.txt",

Reply via email to