So, I have a couple of questions about performance in JavaScript. I do high-performance C/C++/CUDA work professionally, so I’m trying to see which parts of that knowledge are applicable to JavaScript.
In another thread I played with some performance optimization for the Sum Primes challenge. I mostly used C style optimizations, but I think that my performance is reasonable.
repl link
code
// ********************************************************************************
// Sieve of Eratosthenes sumPrimes demo and performance study
// ********************************************************************************
// 
// Performance logging: perf_hooks provides the high-resolution
// performance.now() millisecond timer used by the benchmark loop below
// 
const performance = require('perf_hooks').performance;
// 
// Debug via console logging flag: when true, sumPrimes logs its
// intermediate state (and runs much slower as a result)
// 
const debug = false;
// Brief: Sum primes from 2 to num (inclusive) using a Sieve of Eratosthenes
// over the odd numbers only: array index i represents the odd number 2*i + 3,
// halving the memory of a naive sieve. The even prime 2 is handled separately.
//
// Inputs:
// num  upper bound to sum primes (inclusive)
//
// Outputs:
// sum  sum of primes from 2 to num (inclusive)
// 
function sumPrimes(num) {
  if (debug) console.log("WARNING  DEBUGGING LOGGING WILL DECREASE PERFORMANCE");
  // Bounds checking: there are no primes below 2
  if (num <= 1) {
    return 0;
  }
  // Make boolean array of the odd numbers 3, 5, ..., <= num
  const upper = Math.floor((num - 1) / 2);
  const isPrime = new Array(upper).fill(true); // 'Guess' all are prime
  if (debug) console.log("  Upper: " + upper);
  // Initialize sum with the only even prime, 2
  let sum = 2;
  // Mark multiples of each prime as false (not prime). Sieving may stop once
  // prime*prime > num, i.e. past index (sqrt(num) - 1) / 2; any composite
  // <= num has a prime factor <= sqrt(num).
  const sqrtUpper = Math.floor((Math.sqrt(num) - 1) / 2);
  for (let i = 0; i <= sqrtUpper; ++i) {
    // Index i survives iff 2*i + 3 is not a multiple of any smaller odd prime
    if (isPrime[i]) {
      // Add the newly confirmed prime to sum
      const prime = 2 * i + 3; // Note that number = index*2 + 3
      sum += prime;
      // Mark all multiples of this prime as false (not prime), starting at
      // prime*prime — smaller multiples were already marked by smaller primes.
      // prime^2 = (2i+3)^2 = 4i^2 + 12i + 9, which maps to index 2i^2 + 6i + 3
      const primeSquaredIndex = 2 * i * i + 6 * i + 3;
      for (let j = primeSquaredIndex; j < upper; j += prime) {
        isPrime[j] = false;
      }
    }
  }
  if (debug) console.log("  isPrime: " + isPrime);
  // Count remaining (unmarked, hence prime) odd numbers in sum
  for (let i = sqrtUpper + 1; i < upper; ++i) {
    if (isPrime[i]) {
      sum += 2 * i + 3;
    }
  }
  if (debug) console.log("  Sum: " + sum);
  // Return
  if (debug) console.log("END DEBUGGING OUTPUT");
  return sum;
}
// 
// Performance testing
// 
// Test cases: num = 5 * 10^i for i in [0, maxPower)
const maxPower = 8;
const numRuns = 25;
console.log("");
console.log("Summary");
console.log("  Number of Test Cases : " + maxPower);
console.log("  Runs Per Test Case : " + numRuns);
console.log("\n\n");
for (let i = 0; i < maxPower; i++) {
  // Log test case
  const num = 5 * (10 ** i);
  console.log("");
  console.log("Test Case " + i);
  console.log("");
  // Multiple runs per case so we can report spread, not a single sample
  let sum = 0;
  const times = [];
  for (let j = 0; j < numRuns; j++) {
    // Time execution
    const t0 = performance.now();
    sum = sumPrimes(num);
    const t1 = performance.now();
    // Log time elapsed (milliseconds)
    times.push(t1 - t0);
  }
  // Compute stats: min/max/mean and population variance over the runs
  const minTime = Math.min(...times);
  const maxTime = Math.max(...times);
  const avgTime = times.reduce((sum, item) => sum + item, 0) / numRuns;
  const variance = times.reduce((sumSqDiff, item) => sumSqDiff + (avgTime - item) ** 2, 0) / numRuns;
  const stdDev = Math.sqrt(variance);
  // Output
  console.log("  Problem Values");
  console.log(" Num : " + num);
  console.log(" Result : " + sum);
  console.log("  Statistics");
  console.log(" Average Time : " + avgTime);
  console.log(" Minimum Time : " + minTime);
  console.log(" Maximum Time : " + maxTime);
  console.log(" Standard Deviation : " + stdDev);
  console.log(" Variance : " + variance);
  console.log("\n\n");
}
// ********************************************************************************
I noticed two things:

It seems that the first call to
console.time("foo")
was longer than the others by a few tenths of a second. I added a dummy timing run at the start to throw away that data. Is that normal? I couldn’t find anything about it Googling, so I wasn’t sure if it was normal, or something I did, or something repl did. 
It seems like there is a lot of variation in run times. That is in part because of how small my test cases are in some cases, but I was wondering how ‘swingy’ in general the performance of JavaScript can be.
Thanks : )