- Revision
- 212182
- Author
- clo...@igalia.com
- Date
- 2017-02-10 22:01:20 -0800 (Fri, 10 Feb 2017)
Log Message
REGRESSION(r182916) run-perf-tests never timeouts
https://bugs.webkit.org/show_bug.cgi?id=167626
Reviewed by Ryosuke Niwa.
The --time-out-ms value (defaults to 600 seconds) of run-perf-tests
was being ignored because the driver was created with no_timeout=True
unconditionally.
This adds a new parameter --no-timeout (disabled by default) that
allows using the driver with the expected timeout values.
It also passes --no-timeout to DRT/WTR to ensure that timeouts
are always controlled by the Python Driver.
* Scripts/webkitpy/performance_tests/perftest.py:
(PerfTest._create_driver): Only disable timeout if --no-timeout is passed.
(PerfTest.run): Disabling timeout should be done when the Driver is created.
* Scripts/webkitpy/performance_tests/perftestsrunner.py:
(PerfTestsRunner.__init__): Ensure --no-timeout is passed to DRT/WTR
(PerfTestsRunner._parse_args): Add a --no-timeout parameter.
(_run_tests_set): Pass the no-timeout parameter.
* Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
(MainTest.test_drt_notimeout): Test --no-timeout is passed to DRT/WTR
(MainTest.test_default_args): Test default values for additional_drt_flag and no_timeout.
(MainTest.test_parse_args): Test that the no-timeout argument is parsed.
Modified Paths
Diff
Modified: trunk/Tools/ChangeLog (212181 => 212182)
--- trunk/Tools/ChangeLog 2017-02-11 05:49:23 UTC (rev 212181)
+++ trunk/Tools/ChangeLog 2017-02-11 06:01:20 UTC (rev 212182)
@@ -1,3 +1,30 @@
+2017-02-10 Carlos Alberto Lopez Perez <clo...@igalia.com>
+
+ REGRESSION(r182916) run-perf-tests never timeouts
+ https://bugs.webkit.org/show_bug.cgi?id=167626
+
+ Reviewed by Ryosuke Niwa.
+
+ The --time-out-ms value (defaults to 600 seconds) of run-perf-tests
+ was being ignored because the driver was created with no_timeout=True
+ unconditionally.
+ This adds a new parameter --no-timeout (disabled by default) that
+ allows using the driver with the expected timeout values.
+ It also passes --no-timeout to DRT/WTR to ensure that timeouts
+ are always controlled by the Python Driver.
+
+ * Scripts/webkitpy/performance_tests/perftest.py:
+ (PerfTest._create_driver): Only disable timeout if --no-timeout is passed.
+ (PerfTest.run): Disabling timeout should be done when the Driver is created.
+ * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+ (PerfTestsRunner.__init__): Ensure --no-timeout is passed to DRT/WTR
+ (PerfTestsRunner._parse_args): Add a --no-timeout parameter.
+ (_run_tests_set): Pass the no-timeout parameter.
+ * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+ (MainTest.test_drt_notimeout): Test --no-timeout is passed to DRT/WTR
+ (MainTest.test_default_args): Test default values for additional_drt_flag and no_timeout.
+ (MainTest.test_parse_args): Test that the no-timeout argument is parsed.
+
2017-02-10 Daniel Bates <daba...@apple.com>
Detach frame from document when entering page cache
Modified: trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py (212181 => 212182)
--- trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py 2017-02-11 05:49:23 UTC (rev 212181)
+++ trunk/Tools/Scripts/webkitpy/performance_tests/perftest.py 2017-02-11 06:01:20 UTC (rev 212182)
@@ -122,12 +122,12 @@
def prepare(self, time_out_ms):
return True
- def _create_driver(self):
- return self._port.create_driver(worker_number=0, no_timeout=True)
+ def _create_driver(self, no_timeout):
+ return self._port.create_driver(worker_number=0, no_timeout=no_timeout)
- def run(self, time_out_ms):
+ def run(self, time_out_ms, no_timeout=False):
for _ in xrange(self._test_runner_count):
- driver = self._create_driver()
+ driver = self._create_driver(no_timeout)
try:
if not self._run_with_driver(driver, time_out_ms):
return None
Modified: trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py (212181 => 212182)
--- trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py 2017-02-11 05:49:23 UTC (rev 212181)
+++ trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner.py 2017-02-11 06:01:20 UTC (rev 212182)
@@ -66,6 +66,9 @@
self._host = Host()
self._port = self._host.port_factory.get(self._options.platform, self._options)
+ # Timeouts are controlled by the Python Driver, so DRT/WTR runs with no-timeout.
+ self._options.additional_drt_flag.append('--no-timeout')
+
# The GTK+ and EFL ports only supports WebKit2, so they always use WKTR.
if self._port.name().startswith("gtk") or self._port.name().startswith("efl"):
self._options.webkit_test_runner = True
@@ -100,6 +103,8 @@
help="Path to the directory under which build files are kept (should not include configuration)"),
optparse.make_option("--time-out-ms", default=600 * 1000,
help="Set the timeout for each test"),
+ optparse.make_option("--no-timeout", action="store_true", default=False,
+ help="Disable test timeouts"),
optparse.make_option("--no-results", action="store_false", dest="generate_results", default=True,
help="Do no generate results JSON and results page."),
optparse.make_option("--output-json-path", action='callback', callback=_expand_path, type="str",
@@ -375,7 +380,8 @@
for i, test in enumerate(tests):
_log.info('Running %s (%d of %d)' % (test.test_name(), i + 1, len(tests)))
start_time = time.time()
- metrics = test.run(self._options.time_out_ms)
+ metrics = test.run(self._options.time_out_ms, self._options.no_timeout)
+
if metrics:
self._results += metrics
else:
Modified: trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py (212181 => 212182)
--- trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py 2017-02-11 05:49:23 UTC (rev 212181)
+++ trunk/Tools/Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py 2017-02-11 06:01:20 UTC (rev 212182)
@@ -55,6 +55,10 @@
runner._host.filesystem.maybe_make_directory(dirname)
runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content
+ def test_drt_notimeout(self):
+ runner, port = self.create_runner()
+ self.assertEqual(runner._options.additional_drt_flag, ['--no-timeout'])
+
def test_collect_tests(self):
runner, port = self.create_runner()
self._add_file(runner, 'inspector', 'a_file.html', 'a content')
@@ -116,11 +120,13 @@
options, args = PerfTestsRunner._parse_args([])
self.assertTrue(options.build)
self.assertEqual(options.time_out_ms, 600 * 1000)
+ self.assertEqual(options.additional_drt_flag, [])
self.assertTrue(options.generate_results)
self.assertTrue(options.show_results)
self.assertTrue(options.use_skipped_list)
self.assertEqual(options.repeat, 1)
self.assertEqual(options.test_runner_count, -1)
+ self.assertEqual(options.no_timeout, False)
def test_parse_args(self):
runner, port = self.create_runner()
@@ -139,7 +145,8 @@
'--additional-drt-flag=--awesomesauce',
'--repeat=5',
'--test-runner-count=5',
- '--debug'])
+ '--debug',
+ '--no-timeout'])
self.assertTrue(options.build)
self.assertEqual(options.build_directory, 'folder42')
self.assertEqual(options.platform, 'platform42')
@@ -155,6 +162,7 @@
self.assertEqual(options.additional_drt_flag, ['--enable-threaded-parser', '--awesomesauce'])
self.assertEqual(options.repeat, 5)
self.assertEqual(options.test_runner_count, 5)
+ self.assertEqual(options.no_timeout, True)
def test_upload_json(self):
runner, port = self.create_runner()