This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin-on-parquet-v2
in repository https://gitbox.apache.org/repos/asf/kylin.git


The following commit(s) were added to refs/heads/kylin-on-parquet-v2 by this 
push:
     new bd5bc7a  KYLIN-4778 Some fix
bd5bc7a is described below

commit bd5bc7a118ee3cebe68937780102d13a54972a6d
Author: yaqian.zhang <598593...@qq.com>
AuthorDate: Tue Dec 8 15:42:09 2020 +0800

    KYLIN-4778 Some fix
---
 .../features/specs/query/query.spec                |  38 ++++---
 .../features/step_impl/before_suite.py             |   4 +-
 .../features/step_impl/query/query.py              |  10 +-
 .../CI/kylin-system-testing/kylin_utils/equals.py  | 115 +++++++++++++--------
 build/CI/kylin-system-testing/kylin_utils/kylin.py |   3 +-
 .../query/sql/sql_castprunesegs/query01.sql        |   3 +-
 .../query/sql/sql_castprunesegs/query02.sql        |   3 +-
 .../sql_subquery/{query30.sql => query30.sql.todo} |   0
 .../sql_subquery/{query31.sql => query31.sql.todo} |   0
 .../query/sql/sql_tableau/query02.sql              |   3 +-
 .../query/sql/sql_tableau/query21.sql              |   1 +
 .../query/sql/sql_tableau/query24.sql              |   2 +-
 .../query/sql/sql_tableau/query29.sql              |   1 +
 .../query/sql/sql_timestamp/query11.sql            |   3 +-
 .../query/sql/sql_timestamp/query12.sql            |   3 +-
 .../query/sql/sql_timestamp/query13.sql            |   3 +-
 .../query/sql/sql_timestamp/query14.sql            |   3 +-
 .../query/sql/sql_window/query00.sql               |   3 +-
 .../query/sql/sql_window/query01.sql               |   3 +-
 .../query/sql/sql_window/query07.sql               |   2 +-
 .../query/sql/sql_window/query08.sql               |   2 +-
 .../{query12.sql => query12.sql.disabled}          |   0
 .../{query13.sql => query13.sql.disabled}          |   0
 build/CI/run-ci.sh                                 |   1 +
 build/script/package.sh                            |   8 +-
 dev-support/build-release/packaging.sh             |   3 +
 dev-support/build-release/script/build_release.sh  |  15 +--
 dev-support/build-release/script/entrypoint.sh     |   5 -
 .../{query07.sql.disabled => query07.sql}          |   2 +-
 .../{query08.sql.disabled => query08.sql}          |   2 +-
 30 files changed, 140 insertions(+), 101 deletions(-)

diff --git a/build/CI/kylin-system-testing/features/specs/query/query.spec 
b/build/CI/kylin-system-testing/features/specs/query/query.spec
index 7e60885..58caf08 100644
--- a/build/CI/kylin-system-testing/features/specs/query/query.spec
+++ b/build/CI/kylin-system-testing/features/specs/query/query.spec
@@ -2,40 +2,38 @@
 Tags:4.x
 
 ## Query sql
-* Query all SQL file in directory "query/sql/sql/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql/"
+* Query all SQL file in directory "query/sql/sql/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql/"
 ## Query sql_casewhen
-* Query all SQL file in directory "query/sql/sql_casewhen/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_casewhen/"
+* Query all SQL file in directory "query/sql/sql_casewhen/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_casewhen/"
 ## Query sql_castprunesegs
-* Query all SQL file in directory "query/sql/sql_castprunesegs/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_castprunesegs/"
+* Query all SQL file in directory "query/sql/sql_castprunesegs/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_castprunesegs/"
 ## Query sql_derived
-* Query all SQL file in directory "query/sql/sql_derived/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_derived/"
+* Query all SQL file in directory "query/sql/sql_derived/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_derived/"
 ## Query sql_distinct_dim
-* Query all SQL file in directory "query/sql/sql_distinct_dim/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_distinct_dim/"
+* Query all SQL file in directory "query/sql/sql_distinct_dim/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_distinct_dim/"
 ## Query sql_distinct_precisely
-* Query all SQL file in directory "query/sql/sql_distinct_precisely/" in 
project "generic_test_project", compare result with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_distinct_precisely/"
-## Query sql_grouping
-* Query all SQL file in directory "query/sql/sql_grouping/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_grouping/"
+* Query all SQL file in directory "query/sql/sql_distinct_precisely/" in 
project "generic_test_project", compare "data_set" with hive pushdown result 
and compare metrics info with sql_result json file in 
"query/sql_result/sql_distinct_precisely/"
 ## Query sql_h2_uncapable
-* Query all SQL file in directory "query/sql/sql_h2_uncapable/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_h2_uncapable/"
+* Query all SQL file in directory "query/sql/sql_h2_uncapable/" in project 
"generic_test_project", compare "row_count" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_h2_uncapable/"
 ## Query sql_join
-* Query all SQL file in directory "query/sql/sql_join/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_join/"
+* Query all SQL file in directory "query/sql/sql_join/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_join/"
 ## Query sql_like
-* Query all SQL file in directory "query/sql/sql_like/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_like/"
+* Query all SQL file in directory "query/sql/sql_like/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_like/"
 ## Query sql_lookup
-* Query all SQL file in directory "query/sql/sql_lookup/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_lookup/"
+* Query all SQL file in directory "query/sql/sql_lookup/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_lookup/"
 ## Query sql_percentile
-* Query all SQL file in directory "query/sql/sql_percentile/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_percentile/"
+* Query all SQL file in directory "query/sql/sql_percentile/" in project 
"generic_test_project", compare "row_count" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_percentile/"
 ## Query sql_subquery
-* Query all SQL file in directory "query/sql/sql_subquery/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_subquery/"
+* Query all SQL file in directory "query/sql/sql_subquery/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_subquery/"
 ## Query sql_tableau
-* Query all SQL file in directory "query/sql/sql_tableau/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_tableau/"
+* Query all SQL file in directory "query/sql/sql_tableau/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_tableau/"
 ## Query sql_timestamp
-* Query all SQL file in directory "query/sql/sql_timestamp/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_timestamp/"
+* Query all SQL file in directory "query/sql/sql_timestamp/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_timestamp/"
 ## Query sql_topn
-* Query all SQL file in directory "query/sql/sql_topn/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_topn/"
+* Query all SQL file in directory "query/sql/sql_topn/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_topn/"
 ## Query sql_union
-* Query all SQL file in directory "query/sql/sql_union/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_union/"
+* Query all SQL file in directory "query/sql/sql_union/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_union/"
 ## Query sql_window
-* Query all SQL file in directory "query/sql/sql_window/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in "query/sql_result/sql_window/"
+* Query all SQL file in directory "query/sql/sql_window/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in "query/sql_result/sql_window/"
 ## Query sql_select_constants
-* Query all SQL file in directory "query/sql/sql_select_constants/" in project 
"generic_test_project", compare result with hive pushdown result and compare 
metrics info with sql_result json file in 
"query/sql_result/sql_select_constants/"
\ No newline at end of file
+* Query all SQL file in directory "query/sql/sql_select_constants/" in project 
"generic_test_project", compare "data_set" with hive pushdown result and 
compare metrics info with sql_result json file in 
"query/sql_result/sql_select_constants/"
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/features/step_impl/before_suite.py 
b/build/CI/kylin-system-testing/features/step_impl/before_suite.py
index d1cd3fd..00fca21 100644
--- a/build/CI/kylin-system-testing/features/step_impl/before_suite.py
+++ b/build/CI/kylin-system-testing/features/step_impl/before_suite.py
@@ -18,6 +18,7 @@
 from getgauge.python import before_suite
 import os
 import json
+import time
 
 from kylin_utils import util
 
@@ -58,4 +59,5 @@ def create_generic_model_and_cube():
         assert json.loads(resp['cubeDescData'])['name'] == cube_name
     if client.get_cube_instance(cube_name=cube_name).get('status') != 'READY' 
and len(client.list_jobs(project_name=project_name, 
job_search_mode='CUBING_ONLY')) == 0:
         client.full_build_cube(cube_name=cube_name)
-    assert client.await_all_jobs(project_name=project_name)
+    assert client.await_all_jobs(project_name=project_name, waiting_time=50)
+    time.sleep(10)
diff --git a/build/CI/kylin-system-testing/features/step_impl/query/query.py 
b/build/CI/kylin-system-testing/features/step_impl/query/query.py
index bb5a9b9..4b4614e 100644
--- a/build/CI/kylin-system-testing/features/step_impl/query/query.py
+++ b/build/CI/kylin-system-testing/features/step_impl/query/query.py
@@ -24,8 +24,8 @@ from kylin_utils import util
 from kylin_utils import equals
 
 
-@step("Query all SQL file in directory <sql_directory> in project 
<project_name>, compare result with hive pushdown result and compare metrics 
info with sql_result json file in <sql_result_directory>")
-def query_sql_file_and_compare(sql_directory, project_name, 
sql_result_directory):
+@step("Query all SQL file in directory <sql_directory> in project 
<project_name>, compare <compare_level> with hive pushdown result and compare 
metrics info with sql_result json file in <sql_result_directory>")
+def query_sql_file_and_compare(sql_directory, project_name, compare_level, 
sql_result_directory):
     sql_directory_list = os.listdir(sql_directory)
     for sql_file_name in sql_directory_list:
         if (sql_file_name.split('.')[len(sql_file_name.split('.'))-1]) == 
'sql':
@@ -33,12 +33,12 @@ def query_sql_file_and_compare(sql_directory, project_name, 
sql_result_directory
                 sql = sql_file.read()
 
             client = util.setup_instance('kylin_instance.yml')
-            expected_result_file_name = sql_result_directory + 
sql_file_name.split(".")[0]
+            expected_result_file_name = sql_result_directory + 
sql_file_name.split(".")[0] + '.json'
             expected_result = None
             if os.path.exists(expected_result_file_name):
-                with open(sql_result_directory + sql_file_name.split(".")[0] + 
'.json', 'r', encoding='utf8') as expected_result_file:
+                with open(expected_result_file_name, 'r', encoding='utf8') as 
expected_result_file:
                     expected_result = json.loads(expected_result_file.read())
-            equals.compare_sql_result(sql=sql, project=project_name, 
kylin_client=client, expected_result=expected_result)
+            equals.compare_sql_result(sql=sql, project=project_name, 
kylin_client=client, expected_result=expected_result, 
compare_level=compare_level)
 
 
 
diff --git a/build/CI/kylin-system-testing/kylin_utils/equals.py 
b/build/CI/kylin-system-testing/kylin_utils/equals.py
index c246e45..2cceff1 100644
--- a/build/CI/kylin-system-testing/kylin_utils/equals.py
+++ b/build/CI/kylin-system-testing/kylin_utils/equals.py
@@ -16,6 +16,9 @@
 # limitations under the License.
 
 import logging
+
+from getgauge.python import Messages
+
 from kylin_utils import util
 
 _array_types = (list, tuple, set)
@@ -70,7 +73,7 @@ def api_response_equals(actual, expected, ignore=None):
             raise assert_failed
 
 
-INTEGER_FAMILY = ['TINYINT', 'SMALLINT', 'INTEGER', 'BIGINT']
+INTEGER_FAMILY = ['TINYINT', 'SMALLINT', 'INTEGER', 'BIGINT', 'INT']
 
 FRACTION_FAMILY = ['DECIMAL', 'DOUBLE', 'FLOAT']
 
@@ -88,19 +91,26 @@ def _is_family(datatype1, datatype2):
 
 
 class _Row(tuple):
-    def __init__(self, values, types):  # pylint: disable=unused-argument
+
+    def __init__(self, values, types, type_nums):  # pylint: 
disable=unused-argument
+        """
+        :param values: results of query response
+        :param types: columnTypeName of query response
+        :param type_nums: columnType of query response. check columnType equal 
when columnTypeName is not family
+        """
         tuple.__init__(self)
         if len(values) != len(types):
             raise ValueError('???')
 
         self._types = types
+        self._type_nums = type_nums
 
         self._has_fraction = False
         for datatype in self._types:
             if datatype in FRACTION_FAMILY:
                 self._has_fraction = True
 
-    def __new__(cls, values, types):  # pylint: disable=unused-argument
+    def __new__(cls, values, types, type_nums):  # pylint: 
disable=unused-argument
         return tuple.__new__(cls, values)
 
     def __eq__(self, other):
@@ -115,21 +125,27 @@ class _Row(tuple):
             otype = other._types[i]
 
             if not _is_family(stype, otype):
-                return False
+                if not self._type_nums or not other._type_nums:
+                    return False
+                if self._type_nums[i] != other._type_nums[i]:
+                    return False
 
             svalue = self[i]
             ovalue = other[i]
 
+            if svalue is None or ovalue is None:
+                if svalue == ovalue:
+                    continue
+                else:
+                    return False
+
             if stype in FRACTION_FAMILY:
-                if svalue is None and ovalue is None:
-                    return True
                 fsvalue = float(svalue)
                 fovalue = float(ovalue)
 
                 diff = abs(fsvalue - fovalue)
 
-                rate = diff / min(fsvalue, fovalue
-                                  ) if fsvalue != 0 and fovalue != 0 else diff
+                rate = diff / min(fsvalue, fovalue) if fsvalue != 0 and 
fovalue != 0 else diff
                 if abs(rate) > 0.01:
                     return False
 
@@ -144,62 +160,72 @@ class _Row(tuple):
         return 0
 
 
-def query_result_equals(expect_resp, actual_resp):
-    expect_column_types = [
-        x['columnTypeName'] for x in expect_resp['columnMetas']
-    ]
-    expect_result = [[y.strip() if y else y for y in x]
-                     for x in expect_resp['results']]
+def query_result_equals(expect_resp, actual_resp, compare_level="data_set"):
+    expect_column_types = [x['columnTypeName'] for x in 
expect_resp['columnMetas']]
+    expect_column_numbers = [x['columnType'] for x in 
expect_resp['columnMetas']]
+    expect_result = [[y.strip() if y else y for y in x] for x in 
expect_resp['results']]
 
-    actual_column_types = [
-        x['columnTypeName'] for x in actual_resp['columnMetas']
-    ]
-    actual_result = [[y.strip() if y else y for y in x]
-                     for x in actual_resp['results']]
+    actual_column_types = [x['columnTypeName'] for x in 
actual_resp['columnMetas']]
+    actual_column_numbers = [x['columnType'] for x in 
actual_resp['columnMetas']]
+    actual_result = [[y.strip() if y else y for y in x] for x in 
actual_resp['results']]
 
     if len(expect_column_types) != len(actual_column_types):
-        logging.error('column count assert failed [%s,%s]',
-                      len(expect_column_types), len(actual_column_types))
+        Messages.write_message('column count assert failed 
[{0},{1}]'.format(len(expect_column_types), len(actual_column_types)))
+        logging.error('column count assert failed [%s,%s]', 
len(expect_column_types),
+                      len(actual_column_types))
         return False
 
-    return dataset_equals(expect_result, actual_result, expect_column_types,
-                          actual_column_types)
+    if compare_level == "data_set":
+        return dataset_equals(
+            expect_result,
+            actual_result,
+            expect_column_types,
+            actual_column_types,
+            expect_column_numbers,
+            actual_column_numbers
+        )
+    if compare_level == "row_count":
+        return row_count_equals(expect_result, actual_result)
+
+
+def row_count_equals(expect_result, actual_result):
+    if len(expect_result) != len(actual_result):
+        Messages.write_message('row count assert failed 
[{0},{1}]'.format(len(expect_result), len(actual_result)))
+        logging.error('row count assert failed [%s,%s]', len(expect_result), 
len(actual_result))
+        return False
+    return True
 
 
-def dataset_equals(expect,
-                   actual,
-                   expect_col_types=None,
-                   actual_col_types=None):
+def dataset_equals(expect, actual, expect_col_types=None, 
actual_col_types=None, expect_col_nums=None,
+                   actual_col_nums=None):
     if len(expect) != len(actual):
-        logging.error('row count assert failed [%s,%s]', len(expect),
-                      len(actual))
+        Messages.write_message('row count assert failed 
[{0},{1}]'.format(len(expect), len(actual)))
+        logging.error('row count assert failed [%s,%s]', len(expect), 
len(actual))
         return False
 
     if expect_col_types is None:
         expect_col_types = ['VARCHAR'] * len(expect[0])
     expect_set = set()
     for values in expect:
-        expect_set.add(_Row(values, expect_col_types))
+        expect_set.add(_Row(values, expect_col_types, expect_col_nums))
 
     if actual_col_types is None:
-        actual_col_types = expect_col_types if expect_col_types else [
-            'VARCHAR'
-        ] * len(actual[0])
+        actual_col_types = expect_col_types if expect_col_types else 
['VARCHAR'] * len(actual[0])
     actual_set = set()
     for values in actual:
-        actual_set.add(_Row(values, actual_col_types))
+        actual_set.add(_Row(values, actual_col_types, actual_col_nums))
 
     assert_result = expect_set ^ actual_set
     if assert_result:
         logging.error('diff[%s]', len(assert_result))
-        if len(assert_result) < 20:
-            print(assert_result)
+        print(assert_result)
+        Messages.write_message("\nDiff {0}".format(assert_result))
         return False
 
     return True
 
 
-def compare_sql_result(sql, project, kylin_client, cube=None, 
expected_result=None):
+def compare_sql_result(sql, project, kylin_client, compare_level="data_set", 
cube=None, expected_result=None):
     pushdown_project = kylin_client.pushdown_project
     if not util.if_project_exists(kylin_client=kylin_client, 
project=pushdown_project):
         kylin_client.create_project(project_name=pushdown_project)
@@ -215,18 +241,17 @@ def compare_sql_result(sql, project, kylin_client, 
cube=None, expected_result=No
     kylin_resp = kylin_client.execute_query(cube_name=cube,
                                             project_name=project,
                                             sql=sql)
-    assert kylin_resp.get('isException') is False
+    assert kylin_resp.get('isException') is False, 'Thown Exception when 
execute ' + sql
 
     pushdown_resp = kylin_client.execute_query(project_name=pushdown_project, 
sql=sql)
     assert pushdown_resp.get('isException') is False
 
-    assert query_result_equals(kylin_resp, pushdown_resp)
+    assert query_result_equals(pushdown_resp, kylin_resp, 
compare_level=compare_level), Messages.write_message("Query result is different 
with pushdown query result {0}, \n------------------------------------\n Actual 
result is {1} \n\n Expected result is {2}".format(sql, 
kylin_resp.get('results'), pushdown_resp.get('results')))
 
     if expected_result is not None:
-        assert expected_result.get("cube") == kylin_resp.get("cube")
+        assert expected_result.get("cube") == kylin_resp.get("cube"), 
Messages.write_message("Sql {0} \n------------------------------------\n Query 
cube is different with json file, actual cube is {1}, expected cube is 
{2}".format(sql, kylin_resp.get("cube"), expected_result.get("cube")))
         if kylin_resp.get("cuboidIds") is not None:
-            assert expected_result.get("cuboidIds") == 
kylin_resp.get("cuboidIds")
-        assert expected_result.get("totalScanCount") == 
kylin_resp.get("totalScanCount")
-        assert expected_result.get("totalScanBytes") == 
kylin_resp.get("totalScanBytes")
-        assert expected_result.get("totalScanFiles") == 
kylin_resp.get("totalScanFiles")
-        assert expected_result.get("pushDown") == kylin_resp.get("pushDown")
\ No newline at end of file
+            assert expected_result.get("cuboidIds") == 
kylin_resp.get("cuboidIds"), Messages.write_message("Sql {0} 
\n------------------------------------\n query cuboidIds is different with json 
file, actual cuboidIds is {1}, expected cuboidIds is {2}".format(sql, 
kylin_resp.get("cuboidIds"), expected_result.get("cuboidIds")))
+        assert expected_result.get("totalScanCount") == 
kylin_resp.get("totalScanCount"), Messages.write_message("Sql {0} 
\n------------------------------------\n query totalScanCount is different with 
json file, actual totalScanCount is {1}, expected totalScanCount is 
{2}".format(sql, kylin_resp.get("totalScanCount"), 
expected_result.get("totalScanCount")))
+        assert expected_result.get("totalScanFiles") == 
kylin_resp.get("totalScanFiles"), Messages.write_message("Sql {0} 
\n------------------------------------\n query totalScanFiles is different with 
json file, actual totalScanFiles is {1}, expected totalScanFiles is 
{2}".format(sql, kylin_resp.get("totalScanFiles"), 
expected_result.get("totalScanFiles")))
+        assert expected_result.get("pushDown") == kylin_resp.get("pushDown"), 
Messages.write_message("Sql {0} \n------------------------------------\n query 
pushDown is different with json file, actual pushDown is {1}, expected pushDown 
is {2}".format(sql, kylin_resp.get("pushDown"), 
expected_result.get("pushDown")))
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/kylin_utils/kylin.py 
b/build/CI/kylin-system-testing/kylin_utils/kylin.py
index 10bd36a..4831410 100644
--- a/build/CI/kylin-system-testing/kylin_utils/kylin.py
+++ b/build/CI/kylin-system-testing/kylin_utils/kylin.py
@@ -541,7 +541,6 @@ class KylinHttpClient(BasicHttpClient):  # pylint: 
disable=too-many-public-metho
         running_flag = ['PENDING', 'RUNNING']
         try_time = 0
         max_try_time = waiting_time * 2
-        # finish_flags = ['ERROR', 'FINISHED', 'DISCARDED']
         while try_time < max_try_time:
             jobs = self.list_jobs(project_name)
             all_finished = True
@@ -549,6 +548,8 @@ class KylinHttpClient(BasicHttpClient):  # pylint: 
disable=too-many-public-metho
                 if job['job_status'] in running_flag:
                     all_finished = False
                     break
+                if job['job_status'] == 'ERROR':
+                    return False
             if all_finished:
                 return True
             time.sleep(30)
diff --git 
a/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query01.sql 
b/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query01.sql
index 9b68722..4594aab 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query01.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query01.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > cast(TIMESTAMPADD(Day, -15000, CURRENT_DATE) as DATE)
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY sum_price
\ No newline at end of file
diff --git 
a/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query02.sql 
b/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query02.sql
index 8953411..e268fe1 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query02.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_castprunesegs/query02.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > '2013-06-01'
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY sum_price
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_subquery/query30.sql 
b/build/CI/kylin-system-testing/query/sql/sql_subquery/query30.sql.todo
similarity index 100%
rename from build/CI/kylin-system-testing/query/sql/sql_subquery/query30.sql
rename to build/CI/kylin-system-testing/query/sql/sql_subquery/query30.sql.todo
diff --git a/build/CI/kylin-system-testing/query/sql/sql_subquery/query31.sql 
b/build/CI/kylin-system-testing/query/sql/sql_subquery/query31.sql.todo
similarity index 100%
rename from build/CI/kylin-system-testing/query/sql/sql_subquery/query31.sql
rename to build/CI/kylin-system-testing/query/sql/sql_subquery/query31.sql.todo
diff --git a/build/CI/kylin-system-testing/query/sql/sql_tableau/query02.sql 
b/build/CI/kylin-system-testing/query/sql/sql_tableau/query02.sql
index 4e95f8d..dad3a8a 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_tableau/query02.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_tableau/query02.sql
@@ -25,6 +25,7 @@ SELECT *
  ON KYLIN_SALES.PART_DT = KYLIN_CAL_DT.cal_dt
  where KYLIN_CAL_DT.week_beg_dt between DATE '2013-05-01' and DATE '2013-08-01'
  group by KYLIN_SALES.lstg_format_name, KYLIN_CAL_DT.week_beg_dt
- having sum(price)>500 
+ having sum(price)>500
+ order by GMV
  ) "TableauSQL" 
  LIMIT 1
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_tableau/query21.sql 
b/build/CI/kylin-system-testing/query/sql/sql_tableau/query21.sql
index f1fb579..a9cff3c 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_tableau/query21.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_tableau/query21.sql
@@ -24,4 +24,5 @@
  where KYLIN_CAL_DT.week_beg_dt between DATE '2013-05-01' and DATE '2013-08-01'
  group by KYLIN_SALES.lstg_format_name, KYLIN_CAL_DT.week_beg_dt
  having sum(price)>500
+ order by GMV
  limit 1
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_tableau/query24.sql 
b/build/CI/kylin-system-testing/query/sql/sql_tableau/query24.sql
index ab6e144..a7f3637 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_tableau/query24.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_tableau/query24.sql
@@ -20,5 +20,5 @@ select KYLIN_SALES.PART_DT, count(1) as cnt_1
 from KYLIN_SALES
 inner join KYLIN_CAL_DT AS KYLIN_CAL_DT on 
KYLIN_SALES.PART_DT=KYLIN_CAL_DT.cal_dt
 group by KYLIN_SALES.PART_DT
-order by 2 desc 
+order by 1 desc
 limit 3
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_tableau/query29.sql 
b/build/CI/kylin-system-testing/query/sql/sql_tableau/query29.sql
index 7e4b8a3..0a947d7 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_tableau/query29.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_tableau/query29.sql
@@ -25,5 +25,6 @@ SELECT *
  ON KYLIN_SALES.PART_DT = KYLIN_CAL_DT.cal_dt
  where KYLIN_CAL_DT.week_beg_dt between DATE '2013-05-01' and DATE '2013-08-01'
  group by KYLIN_SALES.lstg_format_name, KYLIN_CAL_DT.week_beg_dt
+ order by GMV
  ) "TableauSQL" 
  LIMIT 1
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query11.sql 
b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query11.sql
index 9b68722..b24e29c 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query11.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query11.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > cast(TIMESTAMPADD(Day, -15000, CURRENT_DATE) as DATE)
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY PART_DT
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query12.sql 
b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query12.sql
index 02f6b7b..6adc765 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query12.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query12.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > cast(TIMESTAMPADD(Day, -2000, CURRENT_DATE) as DATE)
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY PART_DT
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query13.sql 
b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query13.sql
index 46dedfd..3b99e3d 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query13.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query13.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > cast(TIMESTAMPADD(Day, -0, CURRENT_TIMESTAMP) as DATE)
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY PART_DT
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query14.sql 
b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query14.sql
index ea963c3..91a581d 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_timestamp/query14.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_timestamp/query14.sql
@@ -18,4 +18,5 @@
 SELECT sum(price)  as sum_price
  FROM KYLIN_SALES
  WHERE PART_DT > cast(TIMESTAMPADD(Day, -15000, CURRENT_TIMESTAMP ) as DATE)
-GROUP BY PART_DT
\ No newline at end of file
+GROUP BY PART_DT
+ORDER BY PART_DT
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query00.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query00.sql
index 1362a5e..7691c65 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_window/query00.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_window/query00.sql
@@ -17,4 +17,5 @@
 --
 select lstg_format_name, sum(price) as GMV, row_number() over(order by 
lstg_format_name)
 from KYLIN_SALES
-group by lstg_format_name
\ No newline at end of file
+group by lstg_format_name
+order by GMV
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query01.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query01.sql
index 3e30ea0..5f8c971 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_window/query01.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_window/query01.sql
@@ -17,4 +17,5 @@
 --
 select lstg_format_name, sum(price) as GMV, count(lstg_format_name) 
over(partition by lstg_format_name)
 from KYLIN_SALES
-group by part_dt, lstg_format_name
\ No newline at end of file
+group by part_dt, lstg_format_name
+order by GMV
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query07.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query07.sql
index b7041a4..3261832 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_window/query07.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_window/query07.sql
@@ -17,7 +17,7 @@
 --
 select part_dt, lstg_format_name, sum(price) as GMV,
 first_value(sum(price)) over (partition by lstg_format_name order by part_dt 
rows 2 preceding) as "prev 2 rows",
-last_value(sum(price)) over (partition by lstg_format_name order by part_dt 
rows 2 following) as "next 2 rows"
+last_value(sum(price)) over (partition by lstg_format_name order by part_dt 
rows between current row and 2 following) as "next 2 rows"
 from KYLIN_SALES
 where part_dt < '2012-02-01'
 group by part_dt, lstg_format_name
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query08.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query08.sql
index c3a55c9..2076825 100644
--- a/build/CI/kylin-system-testing/query/sql/sql_window/query08.sql
+++ b/build/CI/kylin-system-testing/query/sql/sql_window/query08.sql
@@ -17,7 +17,7 @@
 --
 select part_dt, lstg_format_name, sum(price) as GMV,
 first_value(sum(price)) over (partition by lstg_format_name order by 
cast(part_dt as timestamp) range interval '3' day preceding) as "prev 3 days",
-last_value(sum(price)) over (partition by lstg_format_name order by 
cast(part_dt as timestamp) range interval '3' day following) as "next 3 days"
+last_value(sum(price)) over (partition by lstg_format_name order by 
cast(part_dt as timestamp) range between current row and interval '3' day 
following) as "next 3 days"
 from KYLIN_SALES
 where part_dt < '2012-02-01'
 group by part_dt, lstg_format_name
\ No newline at end of file
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query12.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query12.sql.disabled
similarity index 100%
rename from build/CI/kylin-system-testing/query/sql/sql_window/query12.sql
rename to 
build/CI/kylin-system-testing/query/sql/sql_window/query12.sql.disabled
diff --git a/build/CI/kylin-system-testing/query/sql/sql_window/query13.sql 
b/build/CI/kylin-system-testing/query/sql/sql_window/query13.sql.disabled
similarity index 100%
rename from build/CI/kylin-system-testing/query/sql/sql_window/query13.sql
rename to 
build/CI/kylin-system-testing/query/sql/sql_window/query13.sql.disabled
diff --git a/build/CI/run-ci.sh b/build/CI/run-ci.sh
index d4c0122..b1559d0 100644
--- a/build/CI/run-ci.sh
+++ b/build/CI/run-ci.sh
@@ -60,6 +60,7 @@ kylin.env.zookeeper-connect-string=write-zookeeper:2181
 kylin.job.scheduler.default=100
 kylin.engine.spark-conf.spark.shuffle.service.enabled=false
 
kylin.query.pushdown.runner-class-name=org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl
+kylin.engine.spark-conf.spark.executor.cores=4
 EOL
 
 #cp -r apache-kylin-bin/* kylin-query
diff --git a/build/script/package.sh b/build/script/package.sh
index ef3c277..984ee60 100755
--- a/build/script/package.sh
+++ b/build/script/package.sh
@@ -75,7 +75,7 @@ cat << EOF > build/commit_SHA1
 EOF
 git rev-parse HEAD >> build/commit_SHA1
 
-sh build/script/build.sh $@         || { exit 1; }
-sh build/script/download-tomcat.sh  || { exit 1; }
-sh build/script/prepare.sh          || { exit 1; }
-sh build/script/compress.sh         || { exit 1; }
+bash build/script/build.sh $@         || { exit 1; }
+bash build/script/download-tomcat.sh  || { exit 1; }
+bash build/script/prepare.sh          || { exit 1; }
+bash build/script/compress.sh         || { exit 1; }
diff --git a/dev-support/build-release/packaging.sh 
b/dev-support/build-release/packaging.sh
index 920b057..a5c32d3 100644
--- a/dev-support/build-release/packaging.sh
+++ b/dev-support/build-release/packaging.sh
@@ -40,6 +40,9 @@ GPG_PASSPHRASE=$GPG_PASSPHRASE
 USER=$USER
 EOF
 
+docker stop kylin-release-machine
+docker rm kylin-release-machine
+
 docker run -i \
   --env-file "$ENVFILE" \
   --name kylin-release-machine \
diff --git a/dev-support/build-release/script/build_release.sh 
b/dev-support/build-release/script/build_release.sh
index 4bdcc4e..f3afcab 100644
--- a/dev-support/build-release/script/build_release.sh
+++ b/dev-support/build-release/script/build_release.sh
@@ -72,7 +72,7 @@ 
KYLIN_PACKAGE_BRANCH_HADOOP3=${GIT_BRANCH_HADOOP3:-master-hadoop3}
 ASF_USERNAME=${ASF_USERNAME:-xxyu}
 RELEASE_VERSION=${RELEASE_VERSION:-3.1.2}
 NEXT_RELEASE_VERSION=${NEXT_RELEASE_VERSION:-3.1.3}
-RUNNING_CI=${RUNNING_CI:1}
+RUNNING_CI=${RUNNING_CI:-1}
 GIT_REPO_URL=${GIT_REPO_URL:-https://github.com/apache/kylin.git}
 
 export source_release_folder=/root/kylin-release-folder/
@@ -134,16 +134,19 @@ build/script/package.sh
 if [[ "$RUNNING_CI" == "1" ]]; then
     cp dist/apache-kylin-${RELEASE_VERSION}-bin.tar.gz ${ci_package_folder}
     cd ${ci_package_folder}
-    tar -zxf dist/apache-kylin-${RELEASE_VERSION}-bin.tar.gz
+    tar -zxf apache-kylin-${RELEASE_VERSION}-bin.tar.gz
     mv apache-kylin-${RELEASE_VERSION}-bin apache-kylin-bin
     tar -cvzf apache-kylin-bin.tar.gz apache-kylin-bin
     rm -rf apache-kylin-${RELEASE_VERSION}-bin
     cd -
 fi
-tar -zxf dist/apache-kylin-${RELEASE_VERSION}-bin.tar.gz
-mv apache-kylin-${RELEASE_VERSION}-bin 
apache-kylin-${RELEASE_VERSION}-bin-hbase1x
-tar -cvzf 
~/dist/dev/kylin/apache-kylin-${KYLIN_PACKAGE_VERSION_RC}/apache-kylin-${RELEASE_VERSION}-bin-hbase1x.tar.gz
 apache-kylin-${RELEASE_VERSION}-bin-hbase1x
-rm -rf apache-kylin-${RELEASE_VERSION}-bin-hbase1x
+
+if [[ "$RELEASE_ENABLE" == "1" ]]; then
+  tar -zxf dist/apache-kylin-${RELEASE_VERSION}-bin.tar.gz
+  mv apache-kylin-${RELEASE_VERSION}-bin 
apache-kylin-${RELEASE_VERSION}-bin-hbase1x
+  tar -cvzf 
~/dist/dev/kylin/apache-kylin-${KYLIN_PACKAGE_VERSION_RC}/apache-kylin-${RELEASE_VERSION}-bin-hbase1x.tar.gz
 apache-kylin-${RELEASE_VERSION}-bin-hbase1x
+  rm -rf apache-kylin-${RELEASE_VERSION}-bin-hbase1x
+fi
 
 #build/script/package.sh -P cdh5.7
 #tar -zxf dist/apache-kylin-${RELEASE_VERSION}-bin.tar.gz
diff --git a/dev-support/build-release/script/entrypoint.sh 
b/dev-support/build-release/script/entrypoint.sh
index 66df942..7a68749 100644
--- a/dev-support/build-release/script/entrypoint.sh
+++ b/dev-support/build-release/script/entrypoint.sh
@@ -18,8 +18,3 @@
 #
 
 bash -x /root/build_release.sh package > /root/build.log
-
-while :
-do
-    sleep 10
-done
diff --git a/kylin-it/src/test/resources/query/sql_window/query07.sql.disabled 
b/kylin-it/src/test/resources/query/sql_window/query07.sql
similarity index 93%
rename from kylin-it/src/test/resources/query/sql_window/query07.sql.disabled
rename to kylin-it/src/test/resources/query/sql_window/query07.sql
index cd1b1d2..40e889f 100644
--- a/kylin-it/src/test/resources/query/sql_window/query07.sql.disabled
+++ b/kylin-it/src/test/resources/query/sql_window/query07.sql
@@ -17,7 +17,7 @@
 --
 select cal_dt, lstg_format_name, sum(price) as GMV,
 first_value(sum(price)) over (partition by lstg_format_name order by cal_dt 
rows 2 preceding) as "prev 2 rows",
-last_value(sum(price)) over (partition by lstg_format_name order by cal_dt 
rows 2 following) as "next 2 rows"
+last_value(sum(price)) over (partition by lstg_format_name order by cal_dt 
rows between current row and 2 following) as "next 2 rows"
 from test_kylin_fact
 where cal_dt < '2012-02-01'
 group by cal_dt, lstg_format_name
diff --git a/kylin-it/src/test/resources/query/sql_window/query08.sql.disabled 
b/kylin-it/src/test/resources/query/sql_window/query08.sql
similarity index 91%
rename from kylin-it/src/test/resources/query/sql_window/query08.sql.disabled
rename to kylin-it/src/test/resources/query/sql_window/query08.sql
index f99eabe..15d5b3c 100644
--- a/kylin-it/src/test/resources/query/sql_window/query08.sql.disabled
+++ b/kylin-it/src/test/resources/query/sql_window/query08.sql
@@ -17,7 +17,7 @@
 --
 select cal_dt, lstg_format_name, sum(price) as GMV,
 first_value(sum(price)) over (partition by lstg_format_name order by 
cast(cal_dt as timestamp) range interval '3' day preceding) as "prev 3 days",
-last_value(sum(price)) over (partition by lstg_format_name order by 
cast(cal_dt as timestamp) range interval '3' day following) as "next 3 days"
+last_value(sum(price)) over (partition by lstg_format_name order by 
cast(cal_dt as timestamp) range between current row and interval '3' day 
following) as "next 3 days"
 from test_kylin_fact
 where cal_dt < '2012-02-01'
 group by cal_dt, lstg_format_name

Reply via email to