I am trying to decide whether to use a plain JS Object or a Map for random lookups by string key in large datasets (more than 1000 entries).
I wrote a simple benchmark http://jsperf.com/javascript-objects-vs-map-performance and the results show that in Chrome (V8), objects outperform maps by roughly a factor of two. However, when I checked other browsers the results were the opposite. Why do the results differ so much between browsers/engines?
I also wrote a similar test in Node.js, and the results there don't match either (test case 6 took much longer than test case 4):
Tests
var now = require("performance-now");
// Three containers populated with the same n key/value pairs:
// a Map keyed by numbers, a Map keyed by strings, and a plain object
// (whose keys are always strings).
var mapKeyValue = new Map();
var mapStringKeyValue = new Map();
var objectKeyValue = {};

var n = 10000;         // entries per container
var testSamples = 100; // timing repetitions per test case

// Probe keys: first, middle, and last inserted key, plus one key that
// is guaranteed to be absent (n * 2). Each key is prepared in both its
// numeric and string form so coercion cost stays out of the timed code.
var firstRow = 0;
var firstRowString = String(firstRow);
var middleRow = Math.floor(n / 2);
var middleRowString = String(middleRow);
var lastRow = n - 1;
var lastRowString = String(lastRow);
var nonExist = n * 2;
var nonExistString = String(nonExist);
// Returns a random 20-character identifier drawn uniformly from
// the alphanumeric alphabet [A-Za-z0-9].
function makeid() {
  var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  var chars = [];
  for (var idx = 0; idx < 20; idx++) {
    chars.push(alphabet[Math.floor(Math.random() * alphabet.length)]);
  }
  return chars.join("");
}
// Fill all three containers with identical data: for each index i,
// store the same fresh random value under the numeric key (number-keyed
// Map) and under its string form (string-keyed Map and plain object).
for (var i = 0; i < n; i++) {
  var value = makeid();
  var key = String(i);
  mapKeyValue.set(i, value);
  mapStringKeyValue.set(key, value);
  objectKeyValue[key] = value;
}
// --- Timed section -------------------------------------------------------
// One thunk per test case; array order defines the "Test case N" numbering
// used by the report below (1 = Map/number key, 2 = Map/string key,
// 3 = object, repeated for first / middle / last / non-existent key).
// Replacing the twelve copy-pasted start/stop/accumulate stanzas with a
// data table removes ~60 duplicated lines and makes it impossible for one
// case to drift out of sync with the others.
// NOTE(review): each sample still times a single lookup, which is near the
// timer's resolution, so individual readings are noisy; the thunk call adds
// a small constant overhead, but it is identical for every case, so the
// cases remain comparable with each other.
var t0, t1;
var averages = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
var lookups = [
  function () { mapKeyValue.get(firstRow); },             // 1
  function () { mapStringKeyValue.get(firstRowString); }, // 2
  function () { objectKeyValue[firstRowString]; },        // 3
  function () { mapKeyValue.get(middleRow); },            // 4
  function () { mapStringKeyValue.get(middleRowString); },// 5
  function () { objectKeyValue[middleRowString]; },       // 6
  function () { mapKeyValue.get(lastRow); },              // 7
  function () { mapStringKeyValue.get(lastRowString); },  // 8
  function () { objectKeyValue[lastRowString]; },         // 9
  function () { mapKeyValue.get(nonExist); },             // 10 (absent key)
  function () { mapStringKeyValue.get(nonExistString); }, // 11 (absent key)
  function () { objectKeyValue[nonExistString]; }         // 12 (absent key)
];
for (var j = 0; j < testSamples; j++) {
  for (var k = 0; k < lookups.length; k++) {
    t0 = now();
    lookups[k]();
    t1 = now();
    averages[k] += (t1 - t0); // accumulate elapsed ms; normalised in the report
  }
}
// Report: normalise each accumulated total to a per-sample average and
// print it in nanoseconds (now() reports milliseconds, hence * 1e6).
console.log("Test samples number " + testSamples);
averages.forEach(function (total, idx) {
  var avgMs = total / testSamples;
  averages[idx] = avgMs; // keep the original's in-place normalisation
  console.log("Test case " + (idx + 1) + " took in average " + (avgMs * 1000000) + " ns");
});
Results
Test samples number 100
Test case 1 took in average 2050.269999999692 ns
Test case 2 took in average 751.2899999997202 ns
Test case 3 took in average 567.3000000004081 ns
Test case 4 took in average 727.2699999999688 ns
Test case 5 took in average 4760.029999999489 ns
Test case 6 took in average 1939.3400000004135 ns
Test case 7 took in average 673.549999999885 ns
Test case 8 took in average 689.3600000002564 ns
Test case 9 took in average 541.3700000001143 ns
Test case 10 took in average 1146.0599999999843 ns
Test case 11 took in average 3096.7699999998285 ns
Test case 12 took in average 644.7400000000058 ns
Let me know if you have any ideas on how to improve the benchmark and make it more accurate. Thank you.