| // Copyright 2013 the V8 project authors. All rights reserved. |
| // Copyright (C) 2015-2017 Apple Inc. All rights reserved. |
| // Redistribution and use in source and binary forms, with or without |
| // modification, are permitted provided that the following conditions are |
| // met: |
| // |
| // * Redistributions of source code must retain the above copyright |
| // notice, this list of conditions and the following disclaimer. |
| // * Redistributions in binary form must reproduce the above |
| // copyright notice, this list of conditions and the following |
| // disclaimer in the documentation and/or other materials provided |
| // with the distribution. |
| // * Neither the name of Google Inc. nor the names of its |
| // contributors may be used to endorse or promote products derived |
| // from this software without specific prior written permission. |
| // |
| // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| |
| |
// Performance.now is used in latency benchmarks. Here it is backed by the
// shell's preciseTime(), scaled from seconds to milliseconds.
| var performance = performance || {}; |
| performance.now = () => preciseTime() * 1000; |
| |
| // Simple framework for running the benchmark suites and |
| // computing a score based on the timing measurements. |
| |
| |
| // A benchmark has a name (string) and a function that will be run to |
| // do the performance measurement. The optional setup and tearDown |
| // arguments are functions that will be invoked before and after |
| // running the benchmark, but the running time of these functions will |
| // not be accounted for in the benchmark score. |
| function Benchmark(name, doWarmup, doDeterministic, run, setup, tearDown, latencyResult, minIterations) { |
| this.name = name; |
| this.doWarmup = doWarmup; |
| this.doDeterministic = doDeterministic; |
| this.run = run; |
| this.Setup = setup ? setup : function() { }; |
| this.TearDown = tearDown ? tearDown : function() { }; |
| this.latencyResult = latencyResult ? latencyResult : null; |
| this.minIterations = minIterations ? minIterations : 32; |
| } |
| |
| |
| // Benchmark results hold the benchmark and the measured time used to |
| // run the benchmark. The benchmark score is computed later once a |
| // full benchmark suite has run to completion. If latency is set to 0 |
| // then there is no latency score for this benchmark. |
| function BenchmarkResult(benchmark, time, latency) { |
| this.benchmark = benchmark; |
| this.time = time; |
| this.latency = latency; |
| } |
| |
| |
| // Automatically convert results to numbers. Used by the geometric |
| // mean computation. |
| BenchmarkResult.prototype.valueOf = function() { |
| return this.time; |
| } |
| |
| |
| // Suites of benchmarks consist of a name and the set of benchmarks in |
| // addition to the reference timing that the final score will be based |
| // on. This way, all scores are relative to a reference run and higher |
// scores imply better performance.
| function BenchmarkSuite(name, reference, benchmarks) { |
| this.name = name; |
| this.reference = reference; |
| this.benchmarks = benchmarks; |
| BenchmarkSuite.suites.push(this); |
| } |
| |
| |
| // Keep track of all declared benchmark suites. |
| BenchmarkSuite.suites = []; |
| |
| // Scores are not comparable across versions. Bump the version if |
// you're making changes that will affect the scores, e.g. if you add
| // a new benchmark or change an existing one. |
| BenchmarkSuite.version = '9'; |
| |
| // Override the alert function to throw an exception instead. |
| alert = function(s) { |
| throw "Alert called with argument: " + s; |
| }; |
| |
| |
| // To make the benchmark results predictable, we replace Math.random |
| // with a 100% deterministic alternative. |
| BenchmarkSuite.ResetRNG = function() { |
| Math.random = (function() { |
| var seed = 49734321; |
| return function() { |
| // Robert Jenkins' 32 bit integer hash function. |
| seed = ((seed + 0x7ed55d16) + (seed << 12)) & 0xffffffff; |
| seed = ((seed ^ 0xc761c23c) ^ (seed >>> 19)) & 0xffffffff; |
| seed = ((seed + 0x165667b1) + (seed << 5)) & 0xffffffff; |
| seed = ((seed + 0xd3a2646c) ^ (seed << 9)) & 0xffffffff; |
| seed = ((seed + 0xfd7046c5) + (seed << 3)) & 0xffffffff; |
| seed = ((seed ^ 0xb55a4f09) ^ (seed >>> 16)) & 0xffffffff; |
| return (seed & 0xfffffff) / 0x10000000; |
| }; |
| })(); |
| } |
| |
| |
| // Runs all registered benchmark suites and optionally yields between |
| // each individual benchmark to avoid running for too long in the |
| // context of browsers. Once done, the final score is reported to the |
| // runner. |
| BenchmarkSuite.RunSuites = function(runner) { |
| var continuation = null; |
| var suites = BenchmarkSuite.suites; |
| var length = suites.length; |
| BenchmarkSuite.scores = []; |
| var index = 0; |
| function RunStep() { |
| while (continuation || index < length) { |
| if (continuation) { |
| continuation = continuation(); |
| } else { |
| var suite = suites[index++]; |
| if (runner.NotifyStart) runner.NotifyStart(suite.name); |
| continuation = suite.RunStep(runner); |
| } |
| if (continuation && typeof window != 'undefined' && window.setTimeout) { |
| window.setTimeout(RunStep, 25); |
| return; |
| } |
| } |
| |
| // show final result |
| if (runner.NotifyScore) { |
| var score = BenchmarkSuite.GeometricMean(BenchmarkSuite.scores); |
| var formatted = BenchmarkSuite.FormatScore(100 * score); |
| runner.NotifyScore(formatted); |
| } |
| } |
| RunStep(); |
| } |
| |
| |
| // Counts the total number of registered benchmarks. Useful for |
| // showing progress as a percentage. |
| BenchmarkSuite.CountBenchmarks = function() { |
| var result = 0; |
| var suites = BenchmarkSuite.suites; |
| for (var i = 0; i < suites.length; i++) { |
| result += suites[i].benchmarks.length; |
| } |
| return result; |
| } |
| |
| |
| // Computes the geometric mean of a set of numbers. |
| BenchmarkSuite.GeometricMean = function(numbers) { |
| var log = 0; |
| for (var i = 0; i < numbers.length; i++) { |
| log += Math.log(numbers[i]); |
| } |
| return Math.pow(Math.E, log / numbers.length); |
| } |
| |
| |
| // Computes the geometric mean of a set of throughput time measurements. |
| BenchmarkSuite.GeometricMeanTime = function(measurements) { |
| var log = 0; |
| for (var i = 0; i < measurements.length; i++) { |
| log += Math.log(measurements[i].time); |
| } |
| return Math.pow(Math.E, log / measurements.length); |
| } |
| |
| |
| // Computes the average of the worst samples. For example, if percentile is 99, this will report the |
| // average of the worst 1% of the samples. |
| BenchmarkSuite.AverageAbovePercentile = function(numbers, percentile) { |
| // Don't change the original array. |
| numbers = numbers.slice(); |
| |
| // Sort in ascending order. |
| numbers.sort(function(a, b) { return a - b; }); |
| |
| // Now the elements we want are at the end. Keep removing them until the array size shrinks too much. |
| // Examples assuming percentile = 99: |
| // |
| // - numbers.length starts at 100: we will remove just the worst entry and then not remove anymore, |
| // since then numbers.length / originalLength = 0.99. |
| // |
| // - numbers.length starts at 1000: we will remove the ten worst. |
| // |
| // - numbers.length starts at 10: we will remove just the worst. |
| var numbersWeWant = []; |
| var originalLength = numbers.length; |
| while (numbers.length / originalLength > percentile / 100) |
| numbersWeWant.push(numbers.pop()); |
| |
| var sum = 0; |
| for (var i = 0; i < numbersWeWant.length; ++i) |
| sum += numbersWeWant[i]; |
| |
| var result = sum / numbersWeWant.length; |
| |
| // Do a sanity check. |
| if (numbers.length && result < numbers[numbers.length - 1]) { |
| throw "Sanity check fail: the worst case result is " + result + |
| " but we didn't take into account " + numbers; |
| } |
| |
| return result; |
| } |
| |
| |
| // Computes the geometric mean of a set of latency measurements. |
| BenchmarkSuite.GeometricMeanLatency = function(measurements) { |
| var log = 0; |
| var hasLatencyResult = false; |
| for (var i = 0; i < measurements.length; i++) { |
| if (measurements[i].latency != 0) { |
| log += Math.log(measurements[i].latency); |
| hasLatencyResult = true; |
| } |
| } |
| if (hasLatencyResult) { |
| return Math.pow(Math.E, log / measurements.length); |
| } else { |
| return 0; |
| } |
| } |
| |
| |
| // Converts a score value to a string with at least three significant |
| // digits. |
| BenchmarkSuite.FormatScore = function(value) { |
| if (value > 100) { |
| return value.toFixed(0); |
| } else { |
| return value.toPrecision(3); |
| } |
| } |
| |
| // Notifies the runner that we're done running a single benchmark in |
| // the benchmark suite. This can be useful to report progress. |
| BenchmarkSuite.prototype.NotifyStep = function(result) { |
| this.results.push(result); |
| if (this.runner.NotifyStep) this.runner.NotifyStep(result.benchmark.name); |
| } |
| |
| |
| // Notifies the runner that we're done with running a suite and that |
| // we have a result which can be reported to the user if needed. |
| BenchmarkSuite.prototype.NotifyResult = function() { |
| var mean = BenchmarkSuite.GeometricMeanTime(this.results); |
| var score = this.reference[0] / mean; |
| BenchmarkSuite.scores.push(score); |
| if (this.runner.NotifyResult) { |
| var formatted = BenchmarkSuite.FormatScore(100 * score); |
| this.runner.NotifyResult(this.name, formatted); |
| } |
| if (this.reference.length == 2) { |
| var meanLatency = BenchmarkSuite.GeometricMeanLatency(this.results); |
| if (meanLatency != 0) { |
| var scoreLatency = this.reference[1] / meanLatency; |
| BenchmarkSuite.scores.push(scoreLatency); |
| if (this.runner.NotifyResult) { |
      var formattedLatency = BenchmarkSuite.FormatScore(100 * scoreLatency);
| this.runner.NotifyResult(this.name + "Latency", formattedLatency); |
| } |
| } |
| } |
| } |
| |
| |
| // Notifies the runner that running a benchmark resulted in an error. |
| BenchmarkSuite.prototype.NotifyError = function(error) { |
| if (this.runner.NotifyError) { |
| this.runner.NotifyError(this.name, error); |
| } |
| if (this.runner.NotifyStep) { |
| this.runner.NotifyStep(this.name); |
| } |
| } |
| |
| |
| // Runs a single benchmark for at least a second and computes the |
| // average time it takes to run a single iteration. |
| BenchmarkSuite.prototype.RunSingleBenchmark = function(benchmark, data) { |
| function Measure(data) { |
| var elapsed = 0; |
| var start = new Date(); |
| |
| // Run either for 1 second or for the number of iterations specified |
| // by minIterations, depending on the config flag doDeterministic. |
    for (var i = 0; (benchmark.doDeterministic ?
                     i < benchmark.minIterations : elapsed < 1000); i++) {
| benchmark.run(); |
| elapsed = new Date() - start; |
| } |
| if (data != null) { |
| data.runs += i; |
| data.elapsed += elapsed; |
| } |
| } |
| |
  // If the benchmark does not ask for a warmup run, initialize the data
  // object up front so the first measurement already counts.
| if (!benchmark.doWarmup && data == null) { |
| data = { runs: 0, elapsed: 0 }; |
| } |
| |
| if (data == null) { |
| Measure(null); |
| return { runs: 0, elapsed: 0 }; |
| } else { |
| Measure(data); |
| // If we've run too few iterations, we continue for another second. |
| if (data.runs < benchmark.minIterations) return data; |
| var usec = (data.elapsed * 1000) / data.runs; |
| var latencySamples = (benchmark.latencyResult != null) ? benchmark.latencyResult() : [0]; |
| var percentile = 99.5; |
| var latency = BenchmarkSuite.AverageAbovePercentile(latencySamples, percentile) * 1000; |
| this.NotifyStep(new BenchmarkResult(benchmark, usec, latency)); |
| return null; |
| } |
| } |
| |
| |
| // This function starts running a suite, but stops between each |
| // individual benchmark in the suite and returns a continuation |
| // function which can be invoked to run the next benchmark. Once the |
| // last benchmark has been executed, null is returned. |
| BenchmarkSuite.prototype.RunStep = function(runner) { |
| BenchmarkSuite.ResetRNG(); |
| this.results = []; |
| this.runner = runner; |
| var length = this.benchmarks.length; |
| var index = 0; |
| var suite = this; |
| var data; |
| |
| // Run the setup, the actual benchmark, and the tear down in three |
| // separate steps to allow the framework to yield between any of the |
| // steps. |
| |
| function RunNextSetup() { |
| if (index < length) { |
| try { |
| suite.benchmarks[index].Setup(); |
| } catch (e) { |
| suite.NotifyError(e); |
| return null; |
| } |
| return RunNextBenchmark; |
| } |
| suite.NotifyResult(); |
| return null; |
| } |
| |
| function RunNextBenchmark() { |
| try { |
| data = suite.RunSingleBenchmark(suite.benchmarks[index], data); |
| } catch (e) { |
| suite.NotifyError(e); |
| return null; |
| } |
| // If data is null, we're done with this benchmark. |
| return (data == null) ? RunNextTearDown : RunNextBenchmark(); |
| } |
| |
| function RunNextTearDown() { |
| try { |
| suite.benchmarks[index++].TearDown(); |
| } catch (e) { |
| suite.NotifyError(e); |
| return null; |
| } |
| return RunNextSetup; |
| } |
| |
| // Start out running the setup. |
| return RunNextSetup(); |
| } |
| |
| // Copyright 2009 the V8 project authors. All rights reserved. |
| // Copyright (C) 2015 Apple Inc. All rights reserved. |
| // Redistribution and use in source and binary forms, with or without |
| // modification, are permitted provided that the following conditions are |
| // met: |
| // |
| // * Redistributions of source code must retain the above copyright |
| // notice, this list of conditions and the following disclaimer. |
| // * Redistributions in binary form must reproduce the above |
| // copyright notice, this list of conditions and the following |
| // disclaimer in the documentation and/or other materials provided |
| // with the distribution. |
| // * Neither the name of Google Inc. nor the names of its |
| // contributors may be used to endorse or promote products derived |
| // from this software without specific prior written permission. |
| // |
| // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
| // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
| // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
| // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
| // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
| // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
| // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
| // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| |
| // This benchmark is based on a JavaScript log processing module used |
| // by the V8 profiler to generate execution time profiles for runs of |
| // JavaScript applications, and it effectively measures how fast the |
| // JavaScript engine is at allocating nodes and reclaiming the memory |
| // used for old nodes. Because of the way splay trees work, the engine |
| // also has to deal with a lot of changes to the large tree object |
| // graph. |
| |
| var Splay = new BenchmarkSuite('Splay', [81491, 2739514], [ |
| new Benchmark("Splay", true, false, |
| SplayRun, SplaySetup, SplayTearDown, SplayLatency) |
| ]); |
| |
| |
| // Configuration. |
| var kSplayTreeSize = 8000; |
| var kSplayTreeModifications = 80; |
| var kSplayTreePayloadDepth = 5; |
| |
| var splayTree = null; |
| var splaySampleTimeStart = 0.0; |
| |
| function GeneratePayloadTree(depth, tag) { |
| if (depth == 0) { |
| return { |
| array : [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ], |
| string : 'String for key ' + tag + ' in leaf node' |
| }; |
| } else { |
| return { |
| left: GeneratePayloadTree(depth - 1, tag), |
| right: GeneratePayloadTree(depth - 1, tag) |
| }; |
| } |
| } |
| |
| |
| function GenerateKey() { |
  // The benchmark framework guarantees that Math.random is
  // deterministic; see BenchmarkSuite.ResetRNG above.
| return Math.random(); |
| } |
| |
| var splaySamples = []; |
| |
| function SplayLatency() { |
| return splaySamples; |
| } |
| |
| function SplayUpdateStats(time) { |
| var pause = time - splaySampleTimeStart; |
| splaySampleTimeStart = time; |
| splaySamples.push(pause); |
| } |
| |
| function InsertNewNode() { |
| // Insert new node with a unique key. |
| var key; |
| do { |
| key = GenerateKey(); |
| } while (splayTree.find(key) != null); |
| var payload = GeneratePayloadTree(kSplayTreePayloadDepth, String(key)); |
| splayTree.insert(key, payload); |
| return key; |
| } |
| |
| |
| function SplaySetup() { |
| // Check if the platform has the performance.now high resolution timer. |
  // If not, throw an exception and quit.
| if (!performance.now) { |
| throw "PerformanceNowUnsupported"; |
| } |
| |
| splayTree = new SplayTree(); |
  splaySampleTimeStart = performance.now();
| for (var i = 0; i < kSplayTreeSize; i++) { |
| InsertNewNode(); |
| if ((i+1) % 20 == 19) { |
| SplayUpdateStats(performance.now()); |
| } |
| } |
| } |
| |
| |
| function SplayTearDown() { |
| // Allow the garbage collector to reclaim the memory |
| // used by the splay tree no matter how we exit the |
| // tear down function. |
| var keys = splayTree.exportKeys(); |
| splayTree = null; |
| |
| splaySamples = []; |
| |
| // Verify that the splay tree has the right size. |
| var length = keys.length; |
| if (length != kSplayTreeSize) { |
| throw new Error("Splay tree has wrong size"); |
| } |
| |
| // Verify that the splay tree has sorted, unique keys. |
| for (var i = 0; i < length - 1; i++) { |
| if (keys[i] >= keys[i + 1]) { |
| throw new Error("Splay tree not sorted"); |
| } |
| } |
| } |
| |
| |
| function SplayRun() { |
| // Replace a few nodes in the splay tree. |
| for (var i = 0; i < kSplayTreeModifications; i++) { |
| var key = InsertNewNode(); |
| var greatest = splayTree.findGreatestLessThan(key); |
| if (greatest == null) splayTree.remove(key); |
| else splayTree.remove(greatest.key); |
| } |
| SplayUpdateStats(performance.now()); |
| } |
| |
| |
| /** |
| * Constructs a Splay tree. A splay tree is a self-balancing binary |
| * search tree with the additional property that recently accessed |
| * elements are quick to access again. It performs basic operations |
| * such as insertion, look-up and removal in O(log(n)) amortized time. |
| * |
| * @constructor |
| */ |
| function SplayTree() { |
| }; |
| |
| |
| /** |
| * Pointer to the root node of the tree. |
| * |
| * @type {SplayTree.Node} |
| * @private |
| */ |
| SplayTree.prototype.root_ = null; |
| |
| |
| /** |
| * @return {boolean} Whether the tree is empty. |
| */ |
| SplayTree.prototype.isEmpty = function() { |
| return !this.root_; |
| }; |
| |
| |
| /** |
| * Inserts a node into the tree with the specified key and value if |
| * the tree does not already contain a node with the specified key. If |
| * the value is inserted, it becomes the root of the tree. |
| * |
| * @param {number} key Key to insert into the tree. |
| * @param {*} value Value to insert into the tree. |
| */ |
| SplayTree.prototype.insert = function(key, value) { |
| if (this.isEmpty()) { |
| this.root_ = new SplayTree.Node(key, value); |
| return; |
| } |
| // Splay on the key to move the last node on the search path for |
| // the key to the root of the tree. |
| this.splay_(key); |
| if (this.root_.key == key) { |
| return; |
| } |
| var node = new SplayTree.Node(key, value); |
| if (key > this.root_.key) { |
| node.left = this.root_; |
| node.right = this.root_.right; |
| this.root_.right = null; |
| } else { |
| node.right = this.root_; |
| node.left = this.root_.left; |
| this.root_.left = null; |
| } |
| this.root_ = node; |
| }; |
| |
| |
| /** |
| * Removes a node with the specified key from the tree if the tree |
| * contains a node with this key. The removed node is returned. If the |
| * key is not found, an exception is thrown. |
| * |
| * @param {number} key Key to find and remove from the tree. |
| * @return {SplayTree.Node} The removed node. |
| */ |
| SplayTree.prototype.remove = function(key) { |
| if (this.isEmpty()) { |
| throw Error('Key not found: ' + key); |
| } |
| this.splay_(key); |
| if (this.root_.key != key) { |
| throw Error('Key not found: ' + key); |
| } |
| var removed = this.root_; |
| if (!this.root_.left) { |
| this.root_ = this.root_.right; |
| } else { |
| var right = this.root_.right; |
| this.root_ = this.root_.left; |
| // Splay to make sure that the new root has an empty right child. |
| this.splay_(key); |
| // Insert the original right child as the right child of the new |
| // root. |
| this.root_.right = right; |
| } |
| return removed; |
| }; |
| |
| |
| /** |
| * Returns the node having the specified key or null if the tree doesn't contain |
| * a node with the specified key. |
| * |
| * @param {number} key Key to find in the tree. |
| * @return {SplayTree.Node} Node having the specified key. |
| */ |
| SplayTree.prototype.find = function(key) { |
| if (this.isEmpty()) { |
| return null; |
| } |
| this.splay_(key); |
| return this.root_.key == key ? this.root_ : null; |
| }; |
| |
| |
| /** |
| * @return {SplayTree.Node} Node having the maximum key value. |
| */ |
| SplayTree.prototype.findMax = function(opt_startNode) { |
| if (this.isEmpty()) { |
| return null; |
| } |
| var current = opt_startNode || this.root_; |
| while (current.right) { |
| current = current.right; |
| } |
| return current; |
| }; |
| |
| |
| /** |
| * @return {SplayTree.Node} Node having the maximum key value that |
| * is less than the specified key value. |
| */ |
| SplayTree.prototype.findGreatestLessThan = function(key) { |
| if (this.isEmpty()) { |
| return null; |
| } |
| // Splay on the key to move the node with the given key or the last |
| // node on the search path to the top of the tree. |
| this.splay_(key); |
| // Now the result is either the root node or the greatest node in |
| // the left subtree. |
| if (this.root_.key < key) { |
| return this.root_; |
| } else if (this.root_.left) { |
| return this.findMax(this.root_.left); |
| } else { |
| return null; |
| } |
| }; |
| |
| |
| /** |
| * @return {Array<*>} An array containing all the keys of tree's nodes. |
| */ |
| SplayTree.prototype.exportKeys = function() { |
| var result = []; |
| if (!this.isEmpty()) { |
| this.root_.traverse_(function(node) { result.push(node.key); }); |
| } |
| return result; |
| }; |
| |
| |
| /** |
| * Perform the splay operation for the given key. Moves the node with |
| * the given key to the top of the tree. If no node has the given |
| * key, the last node on the search path is moved to the top of the |
| * tree. This is the simplified top-down splaying algorithm from: |
| * "Self-adjusting Binary Search Trees" by Sleator and Tarjan |
| * |
| * @param {number} key Key to splay the tree on. |
| * @private |
| */ |
| SplayTree.prototype.splay_ = function(key) { |
| if (this.isEmpty()) { |
| return; |
| } |
| // Create a dummy node. The use of the dummy node is a bit |
| // counter-intuitive: The right child of the dummy node will hold |
| // the L tree of the algorithm. The left child of the dummy node |
| // will hold the R tree of the algorithm. Using a dummy node, left |
| // and right will always be nodes and we avoid special cases. |
| var dummy, left, right; |
| dummy = left = right = new SplayTree.Node(null, null); |
| var current = this.root_; |
| while (true) { |
| if (key < current.key) { |
| if (!current.left) { |
| break; |
| } |
| if (key < current.left.key) { |
| // Rotate right. |
| var tmp = current.left; |
| current.left = tmp.right; |
| tmp.right = current; |
| current = tmp; |
| if (!current.left) { |
| break; |
| } |
| } |
| // Link right. |
| right.left = current; |
| right = current; |
| current = current.left; |
| } else if (key > current.key) { |
| if (!current.right) { |
| break; |
| } |
| if (key > current.right.key) { |
| // Rotate left. |
| var tmp = current.right; |
| current.right = tmp.left; |
| tmp.left = current; |
| current = tmp; |
| if (!current.right) { |
| break; |
| } |
| } |
| // Link left. |
| left.right = current; |
| left = current; |
| current = current.right; |
| } else { |
| break; |
| } |
| } |
| // Assemble. |
| left.right = current.left; |
| right.left = current.right; |
| current.left = dummy.right; |
| current.right = dummy.left; |
| this.root_ = current; |
| }; |
| |
| |
| /** |
| * Constructs a Splay tree node. |
| * |
| * @param {number} key Key. |
| * @param {*} value Value. |
| */ |
| SplayTree.Node = function(key, value) { |
| this.key = key; |
| this.value = value; |
| }; |
| |
| |
| /** |
| * @type {SplayTree.Node} |
| */ |
| SplayTree.Node.prototype.left = null; |
| |
| |
| /** |
| * @type {SplayTree.Node} |
| */ |
| SplayTree.Node.prototype.right = null; |
| |
| |
| /** |
| * Performs an ordered traversal of the subtree starting at |
| * this SplayTree.Node. |
| * |
| * @param {function(SplayTree.Node)} f Visitor function. |
| * @private |
| */ |
| SplayTree.Node.prototype.traverse_ = function(f) { |
| var current = this; |
| while (current) { |
| var left = current.left; |
| if (left) left.traverse_(f); |
| f(current); |
| current = current.right; |
| } |
| }; |
| |
// Reporting hook for the messages produced in start() below; left as a
// no-op in this file.
function report(msg)
{
}
| |
| function start(resultObject) |
| { |
| SplaySetup(); |
| var samples = []; |
| var before = performance.now(); |
| for (var i = 0; i < 10000; ++i) { |
| SplayRun(); |
| var after = performance.now(); |
| samples.push(after - before); |
| before = after; |
| } |
| SplayTearDown(); |
| |
| var scatterData = []; |
| for (var i = 0; i < samples.length; ++i) |
| scatterData.push({x: i + 1, y: samples[i]}); |
| |
| report("JetStream-like Latency Score: " + Math.round(4000 / BenchmarkSuite.AverageAbovePercentile(samples, 99.5))); |
| |
| var sumOfSquares = 0; |
| for (var i = 0; i < samples.length; ++i) |
| sumOfSquares += samples[i] * samples[i]; |
| report("Octane-like Latency Score: " + Math.round(27395.14 / Math.sqrt(sumOfSquares / samples.length))); |
| |
| for (var percentile of [99.5, 95, 87, 75, 50, 0]) |
| report("Average above " + percentile + "%: " + BenchmarkSuite.AverageAbovePercentile(samples, percentile)); |
| |
| resultObject.value = BenchmarkSuite.AverageAbovePercentile(samples, 99.5); |
| } |
| |
| start(arguments[0]); |