2012-05-03 06:43:58 +00:00
|
|
|
// There are tests for computeStatistics() located in LayoutTests/fast/harness/perftests
|
2012-01-31 20:05:37 +00:00
|
|
|
|
2012-10-02 04:57:33 +00:00
|
|
|
// When driven by DumpRenderTree/WebKitTestRunner, keep the page alive until
// testRunner.notifyDone() is called (see finish()) and emit plain text
// instead of a pixel dump.
if (window.testRunner) {
    testRunner.waitUntilDone();
    testRunner.dumpAsText();
}
|
|
|
|
|
2012-10-02 04:57:33 +00:00
|
|
|
(function () {
|
// Log lines are buffered when running inside a test runner and flushed at the
// end of the test; in a plain browser they are written to the document
// immediately (see PerfTestRunner.log and finish()).
var logLines = window.testRunner ? [] : null;
// True only outside of test runners; gates logInfo/logDetail output.
var verboseLogging = false;
// Iteration bookkeeping. completedIterations starts at -1 so the warm-up run
// can be ignored (see start() and ignoreWarmUpAndLog()).
var completedIterations;
var callsPerIteration = 1;
// The test object currently being driven, or null between tests.
var currentTest = null;
// Per-iteration measurements, reset in start().
var results;
var jsHeapResults;
var mallocHeapResults;
var iterationCount = undefined;
// Responsiveness-tracking state. NOTE(review): not referenced in this chunk —
// presumably consumed by code outside the visible region; confirm before removing.
var lastResponsivenessTimestamp = 0;
var _longestResponsivenessDelay = 0;
var continueCheckingResponsiveness = false;
// Public namespace object, populated with methods below.
var PerfTestRunner = {};
|
// To make benchmark results predictable, Math.random is replaced below with a
// 100% deterministic alternative seeded by this value; resetRandomSeed()
// restores the initial seed between runs.
PerfTestRunner.randomSeed = PerfTestRunner.initialRandomSeed = 49734321;
|
// Rewind the deterministic PRNG to its initial seed so every run of a test
// sees the same "random" sequence.
PerfTestRunner.resetRandomSeed = function() {
    PerfTestRunner.randomSeed = PerfTestRunner.initialRandomSeed;
}
|
// Deterministic replacement for Math.random, so benchmark runs are
// reproducible. Returns a value in [0, 1) derived from the running seed.
PerfTestRunner.random = Math.random = function() {
    // Robert Jenkins' 32 bit integer hash function.
    var seed = PerfTestRunner.randomSeed;
    seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff;
    seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff;
    seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff;
    seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff;
    seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff;
    seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff;
    PerfTestRunner.randomSeed = seed;
    return (seed & 0xfffffff) / 0x10000000;
};
|
// Timestamp helper: prefer the high-resolution performance.now() when the
// browser provides it, otherwise fall back to Date.now.
PerfTestRunner.now = window.performance && window.performance.now ? function () { return window.performance.now(); } : Date.now;
|
// Log informative (non-result) text. Suppressed inside test runners, where
// verboseLogging is false, to keep the parsed output minimal.
PerfTestRunner.logInfo = function (text) {
    if (verboseLogging)
        this.log(text);
}
|
// Log an informative "label: value" detail line, indented so the harness can
// distinguish it from result lines. Suppressed inside test runners.
PerfTestRunner.logDetail = function (label, value) {
    // FIX: the indentation prefix had been collapsed to a single space; per the
    // documented format these detail lines carry 4-space indentation (e.g.
    // "    mean: 123 s"), which the perftest output parser relies on.
    if (verboseLogging)
        this.log('    ' + label + ': ' + value);
}
|
// Fetch a resource synchronously and return its body as text. Synchronous XHR
// is acceptable here because tests deliberately block on their fixtures.
PerfTestRunner.loadFile = function (path) {
    var request = new XMLHttpRequest();
    request.open("GET", path, false);
    request.send(null);
    return request.responseText;
}
|
// Compute min/max/median/mean/variance/stdev over `times`.
// Returns an object that also carries the original values array and the unit
// (defaulting to "ms"). There are tests for this function under
// LayoutTests/fast/harness/perftests.
PerfTestRunner.computeStatistics = function (times, unit) {
    var sorted = times.slice();

    // Add values from the smallest to the largest to avoid the loss of
    // significance, and to read the order statistics straight off the array.
    sorted.sort(function (a, b) { return a - b; });

    var middle = Math.floor(sorted.length / 2);
    var result = {
        min: sorted[0],
        max: sorted[sorted.length - 1],
        median: sorted.length % 2 ? sorted[middle] : (sorted[middle - 1] + sorted[middle]) / 2,
    };

    // Note: "values" intentionally aliases the caller's (unsorted) array.
    result.values = times;

    // Compute the mean and variance using Knuth's online algorithm (has good numerical stability).
    var squareSum = 0;
    result.mean = 0;
    for (var i = 0; i < sorted.length; ++i) {
        var value = sorted[i];
        var delta = value - result.mean;
        result.mean += delta / (i + 1);
        squareSum += delta * (value - result.mean);
    }
    result.variance = sorted.length <= 1 ? 0 : squareSum / (sorted.length - 1);
    result.stdev = Math.sqrt(result.variance);
    result.unit = unit || "ms";

    return result;
}
|
// Emit one machine-parsable result line ("<title> -> [v1, v2, ...] <unit>")
// followed by the aggregate statistics as indented, informative-only details.
PerfTestRunner.logStatistics = function (values, unit, title) {
    var statistics = this.computeStatistics(values, unit);
    this.log("");
    this.log(title + " -> [" + statistics.values.join(", ") + "] " + statistics.unit);
    var aggregates = ["mean", "median", "stdev", "min", "max"];
    for (var i = 0; i < aggregates.length; i++) {
        var name = aggregates[i];
        PerfTestRunner.logDetail(name, statistics[name] + ' ' + statistics.unit);
    }
}
|
// Bytes currently in use on the malloc heap, per window.internals
// (committed VM minus what sits on free lists).
function getUsedMallocHeap() {
    var statistics = window.internals.mallocStatistics();
    return statistics.committedVMBytes - statistics.freeListBytes;
}
|
// Bytes currently in use on the JavaScript heap, per window.internals.
function getUsedJSHeap() {
    return window.internals.memoryInfo().usedJSHeapSize;
}
|
// Force a garbage collection before measuring. Uses the test runner's
// GCController when present; otherwise churns allocations to coax the
// collector into running.
PerfTestRunner.gc = function () {
    if (window.GCController) {
        window.GCController.collect();
        return;
    }

    function gcRec(n) {
        if (n < 1)
            return {};
        // "i" here is the hoisted loop counter below; the object and the
        // string concatenation exist purely to generate garbage.
        var temp = {i: "ab" + i + (i / 100000)};
        temp += "foo";
        gcRec(n-1);
    }
    for (var i = 0; i < 1000; i++)
        gcRec(10);
};
|
// Append a line of text to the in-page <pre id="log"> element, creating it on
// first use, and keep the newest line scrolled into view.
function logInDocument(text) {
    if (!document.getElementById("log")) {
        var pre = document.createElement("pre");
        pre.id = "log";
        document.body.appendChild(pre);
    }
    document.getElementById("log").innerHTML += text + "\n";
    // FIX: document.body.height is not a DOM property (it was always
    // undefined), so the intended scroll-to-bottom never happened; use
    // scrollHeight instead.
    window.scrollTo(0, document.body.scrollHeight);
}
|
// Record a line of output. Inside a test runner the line is buffered in
// logLines and flushed when the test finishes; in a browser it is written to
// the document immediately.
PerfTestRunner.log = function (text) {
    logLines ? logLines.push(text) : logInDocument(text);
}
|
// Report an unrecoverable error and terminate the test immediately.
function logFatalError(text) {
    PerfTestRunner.log(text);
    finish();
}
|
// Reset the per-test state and kick off the measurement loop.
// `runner` is the per-iteration callback (optional for async tests);
// `doNotLogStart` suppresses the "Running N times" banner.
function start(test, runner, doNotLogStart) {
    if (!test) {
        logFatalError("Got a bad test object.");
        return;
    }

    currentTest = test;
    // FIXME: We should be using multiple instances of test runner on Dromaeo as well but it's too slow now.
    // FIXME: Don't hard code the number of in-process iterations to use inside a test runner.
    iterationCount = test.customIterationCount || (window.testRunner ? 5 : 20);
    completedIterations = -1;
    results = [];
    jsHeapResults = [];
    mallocHeapResults = [];
    verboseLogging = !window.testRunner;

    if (!doNotLogStart) {
        PerfTestRunner.logInfo('');
        PerfTestRunner.logInfo("Running " + iterationCount + " times");
    }

    // completedIterations starts at -1 so the first run is treated as warm-up
    // unless the test explicitly opts out.
    if (test.doNotIgnoreInitialRun)
        completedIterations++;

    if (runner)
        scheduleNextRun(runner);
}
|
// Run one iteration of `runner` on a zero-delay timeout (after a GC), record
// its measurement, and either schedule the next iteration or finish.
function scheduleNextRun(runner) {
    PerfTestRunner.gc();
    window.setTimeout(function () {
        // `measuredValue` is declared with var so it stays visible after the
        // try block; any exception from setup() or runner() aborts the test.
        try {
            if (currentTest.setup)
                currentTest.setup();

            var measuredValue = runner();
        } catch (exception) {
            logFatalError("Got an exception while running test.run with name=" + exception.name + ", message=" + exception.message);
            return;
        }

        completedIterations++;

        try {
            ignoreWarmUpAndLog(measuredValue);
        } catch (exception) {
            logFatalError("Got an exception while logging the result with name=" + exception.name + ", message=" + exception.message);
            return;
        }

        if (completedIterations < iterationCount)
            scheduleNextRun(runner);
        else
            finish();
    }, 0);
}
|
// Record one iteration's measurement, discarding warm-up runs
// (completedIterations <= 0). Also samples heap usage when window.internals
// is available and the test hasn't opted out.
function ignoreWarmUpAndLog(measuredValue, doNotLogProgress) {
    var labeledResult = measuredValue + " " + PerfTestRunner.unit;

    if (completedIterations <= 0) {
        if (!doNotLogProgress)
            PerfTestRunner.logDetail(completedIterations, labeledResult + " (Ignored warm-up run)");
        return;
    }

    results.push(measuredValue);
    if (window.internals && !currentTest.doNotMeasureMemoryUsage) {
        jsHeapResults.push(getUsedJSHeap());
        mallocHeapResults.push(getUsedMallocHeap());
    }
    if (!doNotLogProgress)
        PerfTestRunner.logDetail(completedIterations, labeledResult);
}
|
2012-08-09 16:21:09 +00:00
|
|
|
|
2012-10-01 23:48:17 +00:00
|
|
|
// Emits the final statistics for the current test and tears the harness down.
// Logs the test description (if any), the primary metric statistics
// (named "<test name>:<Metric>[:<aggregator>]"), and memory statistics when
// heap samples were collected. Under a test runner, buffered log lines are
// flushed into the document and testRunner.notifyDone() is called unless the
// test requested continueTesting.
function finish() {
    try {
        var prefix = currentTest.name || '';
        if (currentTest.description)
            PerfTestRunner.log("Description: " + currentTest.description);
        // FIX: declare with var — this assignment previously created an
        // implicit global named "metric" (and would throw in strict mode).
        var metric = {'fps': 'FrameRate', 'runs/s': 'Runs', 'pt': 'Score', 'ms': 'Time'}[PerfTestRunner.unit];
        var suffix = currentTest.aggregator ? ':' + currentTest.aggregator : '';
        PerfTestRunner.logStatistics(results, PerfTestRunner.unit, prefix + ":" + metric + suffix);
        if (jsHeapResults.length) {
            PerfTestRunner.logStatistics(jsHeapResults, "bytes", prefix + ":JSHeap");
            PerfTestRunner.logStatistics(mallocHeapResults, "bytes", prefix + ":Malloc");
        }
        if (logLines && !currentTest.continueTesting)
            logLines.forEach(logInDocument);
        if (currentTest.done)
            currentTest.done();
    } catch (exception) {
        logInDocument("Got an exception while finalizing the test with name=" + exception.name + ", message=" + exception.message);
    }

    if (!currentTest.continueTesting) {
        if (window.testRunner)
            testRunner.notifyDone();
        return;
    }

    currentTest = null;
}
|
2012-05-14 05:23:37 +00:00
|
|
|
|
2012-10-17 20:57:46 +00:00
|
|
|
// Begins an asynchronous test whose samples are pushed in later via
// PerfTestRunner.measureValueAsync. The test object supplies the unit label.
PerfTestRunner.prepareToMeasureValuesAsync = function (asyncTest) {
    PerfTestRunner.unit = asyncTest.unit;
    start(asyncTest);
}
|
|
|
|
|
2012-12-04 10:00:54 +00:00
|
|
|
// Records one asynchronously-measured sample. Returns true while the harness
// wants more iterations, and false once the test has finished (or a fatal
// logging error occurred).
PerfTestRunner.measureValueAsync = function (measuredValue) {
    completedIterations++;

    try {
        ignoreWarmUpAndLog(measuredValue);
    } catch (error) {
        logFatalError("Got an exception while logging the result with name=" + error.name + ", message=" + error.message);
        return false;
    }

    var moreIterationsNeeded = completedIterations < iterationCount;
    if (!moreIterationsNeeded)
        finish();
    return moreIterationsNeeded;
}
|
|
|
|
|
2013-11-27 05:33:17 +00:00
|
|
|
// Reports a precomputed list of sample values (no timing happens here).
// Per-sample progress logging is suppressed; only the final statistics are
// emitted by finish().
PerfTestRunner.reportValues = function (test, values) {
    PerfTestRunner.unit = test.unit;
    start(test, null, true);
    for (var index = 0; index < values.length; ++index) {
        completedIterations++;
        ignoreWarmUpAndLog(values[index], true);
    }
    finish();
}
|
|
|
|
|
2012-10-01 23:48:17 +00:00
|
|
|
// Runs a synchronous test, reporting elapsed wall-clock time in milliseconds.
// (If test.run() returns a number, measureTimeOnce reports that value instead
// of the measured duration.)
PerfTestRunner.measureTime = function (test) {
    PerfTestRunner.unit = "ms";
    start(test, measureTimeOnce);
}
|
2012-05-14 05:23:37 +00:00
|
|
|
|
2012-10-01 23:48:17 +00:00
|
|
|
// Executes currentTest.run() once and returns the sample for this iteration:
// the run function's own numeric return value when it provides one, otherwise
// the elapsed time in milliseconds.
function measureTimeOnce() {
    var startTime = PerfTestRunner.now();
    var returnValue = currentTest.run();
    var endTime = PerfTestRunner.now();

    // A primitive, non-NaN number returned by run() overrides the timing.
    // (The subtraction self-comparison filters out NaN as well as non-numbers.)
    if (returnValue - 0 === returnValue) {
        if (returnValue < 0)
            PerfTestRunner.log("runFunction returned a negative value: " + returnValue);
        return returnValue;
    }

    return endTime - startTime;
}
|
2012-01-19 21:50:52 +00:00
|
|
|
|
2012-10-17 22:06:52 +00:00
|
|
|
// Runs a synchronous test and reports its throughput in runs per second.
PerfTestRunner.measureRunsPerSecond = function (test) {
    PerfTestRunner.unit = "runs/s";
    start(test, measureRunsPerSecondOnce);
}
|
2012-01-27 22:03:37 +00:00
|
|
|
|
2012-10-01 23:48:17 +00:00
|
|
|
// Repeatedly invokes the test for at least ~750ms of wall-clock time and
// converts the tally into runs per second. During the warm-up iteration
// (completedIterations < 0) the batch size (callsPerIteration) is grown until
// a batch takes at least 100ms, keeping per-batch timing overhead small for
// very fast tests.
function measureRunsPerSecondOnce() {
    var minimumTotalTime = 750;
    var elapsed = 0;
    var runCount = 0;

    do {
        elapsed += callRunAndMeasureTime(callsPerIteration);
        runCount += callsPerIteration;
        var stillCalibrating = completedIterations < 0 && elapsed < 100;
        if (stillCalibrating)
            callsPerIteration = Math.max(10, 2 * callsPerIteration);
    } while (elapsed < minimumTotalTime);

    return runCount * 1000 / elapsed;
}
|
|
|
|
|
2012-10-01 23:48:17 +00:00
|
|
|
// Calls currentTest.run() the given number of times and returns the total
// elapsed wall-clock time in milliseconds.
function callRunAndMeasureTime(callsPerIteration) {
    var startTime = PerfTestRunner.now();
    for (var call = 0; call < callsPerIteration; ++call)
        currentTest.run();
    var endTime = PerfTestRunner.now();
    return endTime - startTime;
}
|
2012-01-27 22:03:37 +00:00
|
|
|
|
2016-12-13 00:12:55 +00:00
|
|
|
// Starts a zero-delay setTimeout polling loop that tracks the longest gap
// observed between two consecutive timer callbacks — a proxy for how long the
// main thread was blocked. Stop it with stopCheckingResponsiveness; read the
// result with longestResponsivenessDelay.
PerfTestRunner.startCheckingResponsiveness = function() {
    lastResponsivenessTimestamp = PerfTestRunner.now();
    _longestResponsivenessDelay = 0;
    continueCheckingResponsiveness = true;

    function pollResponsiveness() {
        var timestamp = PerfTestRunner.now();
        var delay = timestamp - lastResponsivenessTimestamp;
        _longestResponsivenessDelay = Math.max(_longestResponsivenessDelay, delay);

        lastResponsivenessTimestamp = timestamp;
        if (continueCheckingResponsiveness)
            setTimeout(pollResponsiveness, 0);
    }

    pollResponsiveness();
}
|
|
|
|
|
|
|
|
// Stops the polling loop started by startCheckingResponsiveness; the loop
// exits after its next scheduled callback fires.
PerfTestRunner.stopCheckingResponsiveness = function() {
    continueCheckingResponsiveness = false;
}
|
|
|
|
|
|
|
|
// Returns the longest delay (in ms) observed between two consecutive
// responsiveness polls since startCheckingResponsiveness was called.
PerfTestRunner.longestResponsivenessDelay = function() {
    return _longestResponsivenessDelay;
}
|
2012-10-23 22:03:55 +00:00
|
|
|
|
|
|
|
// Measures the time to incrementally parse, style, and lay out the document
// at test.path by document.write()-ing it into a sandboxed iframe in chunks.
PerfTestRunner.measurePageLoadTime = function(test) {
    test.run = function() {
        var file = PerfTestRunner.loadFile(test.path);
        // FIX: was "this.chunkSize" — now uses the closed-over test object
        // consistently so run() no longer depends on its receiver. The default
        // is still stored on the test object for callers that read it back.
        if (!test.chunkSize)
            test.chunkSize = 50000;

        var chunks = [];
        // The smaller the chunks the more style resolves we do.
        // Smaller chunk sizes will show more samples in style resolution.
        // Larger chunk sizes will show more samples in line layout.
        // Smaller chunk sizes run slower overall, as the per-chunk overhead is high.
        var chunkCount = Math.ceil(file.length / test.chunkSize);
        for (var chunkIndex = 0; chunkIndex < chunkCount; chunkIndex++) {
            var chunk = file.substr(chunkIndex * test.chunkSize, test.chunkSize);
            chunks.push(chunk);
        }

        PerfTestRunner.logInfo("Testing " + file.length + " byte document in " + chunkCount + " " + test.chunkSize + " byte chunks.");

        var iframe = document.createElement("iframe");
        document.body.appendChild(iframe);

        iframe.sandbox = ''; // Prevent external loads which could cause write() to return before completing the parse.
        iframe.style.width = "600px"; // Have a reasonable size so we're not line-breaking on every character.
        iframe.style.height = "800px";
        iframe.contentDocument.open();

        for (var chunkIndex = 0; chunkIndex < chunks.length; chunkIndex++) {
            iframe.contentDocument.write(chunks[chunkIndex]);
            // Note that we won't cause a style resolve until we've encountered the <body> element.
            // Thus the number of chunks counted above is not exactly equal to the number of style resolves.
            if (iframe.contentDocument.body)
                iframe.contentDocument.body.clientHeight; // Force a full layout/style-resolve.
            // FIX: was "iframe.documentElement" — an HTMLIFrameElement has no
            // documentElement property, so this branch threw a TypeError
            // whenever it was reached before <body> was parsed.
            else if (iframe.contentDocument.documentElement && iframe.contentDocument.documentElement.localName == 'html')
                iframe.contentDocument.documentElement.offsetWidth; // Force the painting.
        }

        iframe.contentDocument.close();
        document.body.removeChild(iframe);
    };

    PerfTestRunner.measureTime(test);
}
|
|
|
|
|
2012-10-02 04:57:33 +00:00
|
|
|
window.PerfTestRunner = PerfTestRunner;
|
2012-10-01 23:48:17 +00:00
|
|
|
})();
|