Modified: trunk/Tools/TestResultServer/model/jsonresults.py (130658 => 130659)
--- trunk/Tools/TestResultServer/model/jsonresults.py 2012-10-08 18:23:05 UTC (rev 130658)
+++ trunk/Tools/TestResultServer/model/jsonresults.py 2012-10-08 18:35:35 UTC (rev 130659)
@@ -131,6 +131,7 @@
         if incremental_tests:
             aggregated_tests = aggregated_json[JSON_RESULTS_TESTS]
             cls._merge_tests(aggregated_tests, incremental_tests, num_runs)
+            cls._normalize_results(aggregated_tests, num_runs)
 
     @classmethod
     def _merge_non_test_data(cls, aggregated_json, incremental_json, num_runs):
@@ -193,7 +194,6 @@
                 aggregated_test = aggregated_json[test_name]
                 cls._insert_item_run_length_encoded(results, aggregated_test[JSON_RESULTS_RESULTS], num_runs)
                 cls._insert_item_run_length_encoded(times, aggregated_test[JSON_RESULTS_TIMES], num_runs)
-                cls._normalize_results_json(test_name, aggregated_json, num_runs)
 
     @classmethod
     def _insert_item_run_length_encoded(cls, incremental_item, aggregated_item, num_runs):
@@ -204,21 +204,33 @@
                 aggregated_item.insert(0, item)
 
     @classmethod
-    def _normalize_results_json(cls, test_name, aggregated_json, num_runs):
-        aggregated_test = aggregated_json[test_name]
-        aggregated_test[JSON_RESULTS_RESULTS] = cls._remove_items_over_max_number_of_builds(aggregated_test[JSON_RESULTS_RESULTS], num_runs)
-        aggregated_test[JSON_RESULTS_TIMES] = cls._remove_items_over_max_number_of_builds(aggregated_test[JSON_RESULTS_TIMES], num_runs)
-
+    def _normalize_results(cls, aggregated_json, num_runs):
+        names_to_delete = []
+        for test_name in aggregated_json:
+            if _is_directory(aggregated_json[test_name]):
+                cls._normalize_results(aggregated_json[test_name], num_runs)
+            else:
+                leaf = aggregated_json[test_name]
+                leaf[JSON_RESULTS_RESULTS] = cls._remove_items_over_max_number_of_builds(leaf[JSON_RESULTS_RESULTS], num_runs)
+                leaf[JSON_RESULTS_TIMES] = cls._remove_items_over_max_number_of_builds(leaf[JSON_RESULTS_TIMES], num_runs)
+                if cls._should_delete_leaf(leaf):
+                    names_to_delete.append(test_name)
+        for test_name in names_to_delete:
+            del aggregated_json[test_name]
+
+    @classmethod
+    def _should_delete_leaf(cls, leaf):
         deletable_types = set((JSON_RESULTS_PASS, JSON_RESULTS_NO_DATA, JSON_RESULTS_SKIP))
-        for result in aggregated_test[JSON_RESULTS_RESULTS]:
+        for result in leaf[JSON_RESULTS_RESULTS]:
             if result[1] not in deletable_types:
-                return
+                return False
 
-        for time in aggregated_test[JSON_RESULTS_TIMES]:
+        for time in leaf[JSON_RESULTS_TIMES]:
             if time[1] >= JSON_RESULTS_MIN_TIME:
-                return
+                return False
 
-        del aggregated_json[test_name]
+        return True
 
     @classmethod
     def _remove_items_over_max_number_of_builds(cls, encoded_list, num_runs):
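For reference, the new _normalize_results above replaces the per-test _normalize_results_json: it walks the aggregated test trie recursively, truncates each leaf's run-length-encoded "results" and "times" histories to the last num_runs runs, and prunes leaves whose entire recorded history is pass/no-data/skip with every run time below JSON_RESULTS_MIN_TIME. The following is a minimal standalone sketch of the same traversal, not the production code: the single-letter codes are inferred from the unit test below ("P", "N", "X" are the deletable results), and MIN_TIME, truncate, and is_directory are simplified stand-ins for JSON_RESULTS_MIN_TIME, _remove_items_over_max_number_of_builds, and _is_directory.

# Standalone sketch; constants are assumptions inferred from the tests.
PASS, NO_DATA, SKIP = "P", "N", "X"
MIN_TIME = 1  # stand-in for JSON_RESULTS_MIN_TIME

def is_directory(node):
    # Leaves carry a "results" history; any other dict is a directory.
    return "results" not in node

def truncate(encoded, num_runs):
    # Keep run-length-encoded [count, value] pairs covering the newest num_runs runs.
    kept, total = [], 0
    for count, value in encoded:
        if total + count >= num_runs:
            kept.append([num_runs - total, value])
            break
        kept.append([count, value])
        total += count
    return kept

def should_delete(leaf):
    # A leaf is deletable only if every run passed, was skipped, or had no
    # data, and no run time ever reached MIN_TIME.
    if any(value not in (PASS, NO_DATA, SKIP) for _, value in leaf["results"]):
        return False
    if any(time >= MIN_TIME for _, time in leaf["times"]):
        return False
    return True

def normalize(tree, num_runs):
    names_to_delete = []
    for name, node in tree.items():
        if is_directory(node):
            normalize(node, num_runs)
        else:
            node["results"] = truncate(node["results"], num_runs)
            node["times"] = truncate(node["times"], num_runs)
            if should_delete(node):
                names_to_delete.append(name)
    for name in names_to_delete:
        del tree[name]

tree = {"dir": {"fast.html": {"results": [[5, "P"]], "times": [[5, 0]]},
                "flaky.html": {"results": [[2, "F"], [3, "P"]], "times": [[5, 0]]}}}
normalize(tree, 4)
assert "fast.html" not in tree["dir"]  # all-pass leaf pruned
assert tree["dir"]["flaky.html"]["results"] == [[2, "F"], [2, "P"]]  # capped at 4 runs

Note that, as in the committed code, directories are never deleted themselves, even if pruning empties them; only test leaves are removed.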
Modified: trunk/Tools/TestResultServer/model/jsonresults_unittest.py (130658 => 130659)
--- trunk/Tools/TestResultServer/model/jsonresults_unittest.py 2012-10-08 18:23:05 UTC (rev 130658)
+++ trunk/Tools/TestResultServer/model/jsonresults_unittest.py 2012-10-08 18:35:35 UTC (rev 130659)
@@ -384,6 +384,38 @@
"results": [[7,"F"]],
"times": [[7,0]]}}})
+    def test_merge_remove_new_test(self):
+        self._test_merge(
+            # Aggregated results
+            {"builds": ["2", "1"],
+             "tests": {"001.html": {
+                           "results": [[199, "F"]],
+                           "times": [[199, 0]]},
+                       }},
+            # Incremental results
+            {"builds": ["3"],
+             "tests": {"001.html": {
+                           "results": [[1, "F"]],
+                           "times": [[1, 0]]},
+                       "002.html": {
+                           "results": [[1, "P"]],
+                           "times": [[1, 0]]},
+                       "003.html": {
+                           "results": [[1, "N"]],
+                           "times": [[1, 0]]},
+                       "004.html": {
+                           "results": [[1, "X"]],
+                           "times": [[1, 0]]},
+                       }},
+            # Expected results
+            {"builds": ["3", "2", "1"],
+             "tests": {"001.html": {
+                           "results": [[200, "F"]],
+                           "times": [[200, 0]]},
+                       }},
+            max_builds=200)
+
+
     def test_merge_remove_test(self):
         self._test_merge(
             # Aggregated results
@@ -510,53 +542,6 @@
"times": [[max_builds,0]]}}},
int(max_builds))
-    def test_merge_build_directory_hierarchy_old_version(self):
-        self._test_merge(
-            # Aggregated results
-            {"builds": ["2", "1"],
-             "tests": {"bar/003.html": {
-                           "results": [[25,"F"]],
-                           "times": [[25,0]]},
-                       "foo/001.html": {
-                           "results": [[50,"F"]],
-                           "times": [[50,0]]},
-                       "foo/002.html": {
-                           "results": [[100,"I"]],
-                           "times": [[100,0]]}},
-             "version": 3},
-            # Incremental results
-            {"builds": ["3"],
-             "tests": {"baz": {
-                           "004.html": {
-                               "results": [[1,"I"]],
-                               "times": [[1,0]]}},
-                       "foo": {
-                           "001.html": {
-                               "results": [[1,"F"]],
-                               "times": [[1,0]]},
-                           "002.html": {
-                               "results": [[1,"I"]],
-                               "times": [[1,0]]}}},
-             "version": 4},
-            # Expected results
-            {"builds": ["3", "2", "1"],
-             "tests": {"bar": {
-                           "003.html": {
-                               "results": [[1,"N"],[25,"F"]],
-                               "times": [[26,0]]}},
-                       "baz": {
-                           "004.html": {
-                               "results": [[1,"I"]],
-                               "times": [[1,0]]}},
-                       "foo": {
-                           "001.html": {
-                               "results": [[51,"F"]],
-                               "times": [[51,0]]},
-                           "002.html": {
-                               "results": [[101,"I"]],
-                               "times": [[101,0]]}}},
-             "version": 4})
-
     # FIXME: Some data got corrupted and has results and times at the directory level.
     # Once we've purged this from all the data, we should throw an error on this case.
     def test_merge_directory_hierarchy_extra_results_and_times(self):
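A reading note for the fixtures above: "results" and "times" are run-length encoded with the newest runs first, so [[199, "F"]] means 199 consecutive failing runs. That is why merging one more failing build into "001.html" yields [[200, "F"]] in the expected results of test_merge_remove_new_test. Below is a simplified sketch of that folding step; it keeps only the coalescing behaviour, whereas the module's _insert_item_run_length_encoded also takes num_runs (see its signature in the diff above).

def insert_run_length_encoded(incremental, aggregated):
    # Fold incremental [count, value] pairs into the front of the aggregated
    # history, coalescing with the newest run when the value matches.
    for count, value in reversed(incremental):
        if aggregated and aggregated[0][1] == value:
            aggregated[0][0] += count
        else:
            aggregated.insert(0, [count, value])

history = [[199, "F"]]
insert_run_length_encoded([[1, "F"]], history)
assert history == [[200, "F"]]  # matches the expected fixture above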