(function(){
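    // Drives a Dromaeo benchmark inside an iframe and reports its scores
    // through WebKit's PerfTestRunner. A perf test page calls DRT.setup()
    // with the name of a Dromaeo test; "dom-attr" below is only an
    // illustrative name from the Dromaeo suite:
    //
    //     DRT.setup("dom-attr");
    //
    // ITERATION_COUNT is forwarded to the Dromaeo harness via the numTests
    // URL parameter and to PerfTestRunner via customIterationCount.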
    var ITERATION_COUNT = 5;

    var DRT = {
        baseURL: "./resources/dromaeo/web/index.html",

        setup: function(testName) {
            var iframe = document.createElement("iframe");
            var url = DRT.baseURL + "?" + testName + "&numTests=" + ITERATION_COUNT;
            iframe.setAttribute("src", url);
            document.body.insertBefore(iframe, document.body.firstChild);
            iframe.addEventListener(
                "load", function() {
                    DRT.targetDocument = iframe.contentDocument;
                    DRT.targetWindow = iframe.contentDocument.defaultView;
                });
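
            // The Dromaeo harness in the iframe reports back via postMessage.
            // Message shapes, as inferred from the handlers below (the exact
            // payloads are defined by the Dromaeo suite, not here):
            //   { name: "dromaeo:ready" }
            //   { name: "dromaeo:progress", status: { score: { name: ..., times: [...] } } }
            //   { name: "dromaeo:alldone", result: [{ times: [...] }, ...] }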
            window.addEventListener(
                "message",
                function(event) {
                    switch (event.data.name) {
                    case "dromaeo:ready":
                        DRT.start();
                        break;
                    case "dromaeo:progress":
                        DRT.progress(event.data);
                        break;
                    case "dromaeo:alldone":
                        DRT.teardown(event.data);
                        break;
                    }
                });
        },
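
        // Builds the descriptor object that PerfTestRunner.reportValues()
        // expects. Subtest reports pass the subtest's name and keep the
        // harness alive (continueTesting becomes true); the final aggregate
        // report passes no name, which lets the test finish.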
        testObject: function(name) {
            return {customIterationCount: ITERATION_COUNT, doNotMeasureMemoryUsage: true, doNotIgnoreInitialRun: true, unit: "runs/s",
                name: name, continueTesting: !!name};
        },

        start: function() {
            DRT.targetWindow.postMessage({ name: "dromaeo:start" }, "*");
        },

        progress: function(message) {
            var score = message.status.score;
            if (score)
                PerfTestRunner.reportValues(this.testObject(score.name), score.times);
        },

        teardown: function(data) {
            PerfTestRunner.log("");
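
            // Each Dromaeo subtest reports runs/s for every iteration. Convert
            // to seconds per run, sum across subtests for each iteration, and
            // invert the sums below to get a combined runs/s per iteration.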
            var tests = data.result;
            var times = [];
            for (var i = 0; i < tests.length; ++i) {
                for (var j = 0; j < tests[i].times.length; ++j) {
                    var runsPerSecond = tests[i].times[j];
                    times[j] = (times[j] || 0) + 1 / runsPerSecond;
                }
            }

            PerfTestRunner.reportValues(this.testObject(), times.map(function (time) { return 1 / time; }));
        },
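
        // Returns a function that forwards its arguments to the function of
        // the same name on the iframe's global object.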
        targetDelegateOf: function(functionName) {
            return function() {
                DRT.targetWindow[functionName].apply(null, arguments);
            };
        },

        log: function(text) {
            PerfTestRunner.log(text);
        }
    };

    // These functions are referenced from htmlrunner.js.
    this.startTest = DRT.targetDelegateOf("startTest");
    this.test = DRT.targetDelegateOf("test");
    this.endTest = DRT.targetDelegateOf("endTest");
    this.prep = DRT.targetDelegateOf("prep");

    window.DRT = DRT;
})();