I am building a tree-like data structure out of associative arrays. Each key is 1-2 characters long and unique within its level. There will be no more than 40 keys at the root level and no more than 5 keys at each subsequent level of the tree. It might look something like this:
{a:{b:null,c:null},de:{f:{g:null}},h:null,i:null,j:null,k:null}
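For comparison, looking a path up in the plain nested-object version is just chained property accesses; a minimal sketch (the tree and lookup names are my own, for illustration):

//Walk the nested-object tree one level at a time
var tree = {a:{b:null,c:null},de:{f:{g:null}},h:null,i:null,j:null,k:null};
var lookup = function(node, path){
    for (var i = 0; i < path.length; i++){
        if (node == null || !node.hasOwnProperty(path[i])) return undefined;
        node = node[path[i]];
    }
    return node;
};
lookup(tree, ['de', 'f', 'g']); // null (the leaf's value)
lookup(tree, ['de', 'x']);      // undefined (no such branch)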
Initially, I thought that creating so many objects with so few keys each (fewer than 3 on average) would be inefficient and memory-intensive. In that case, I would implement my own hash table, like so:
//Suppose keys is a multi-dimensional array [[key, data], ...]
var hash = function(keys){
    var max = keys.length * 3, tbl = [];

    //Map a key to a slot in the table
    var code = function(key){
        return (key.charCodeAt(0) * 31) % max;
    };

    //Look a key up, resolving collisions by linear probing
    this.get = function(key){
        //2-character keys actually have a separate hash generation
        //algorithm... we'll ignore them for now
        var i = code(key);
        while (true){
            if (typeof tbl[i] == 'undefined') return false; //empty slot: not present
            if (tbl[i][0] == key) return tbl[i][1];         //found it
            i = (i + 1) % max;                              //collision: probe onward
        }
    };

    //Constructor body: insert each key at its hash slot,
    //probing forward past occupied slots
    for (var i = 0; i < keys.length; i++){
        var index = code(keys[i][0]);
        while (typeof tbl[index] != 'undefined')
            index = (index + 1) % max;
        tbl[index] = keys[i];
    }
};
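For illustration, I'd instantiate and use it something like this (the key/data pairs here are made up):

var h = new hash([['a', 1], ['de', 2], ['h', 3]]);
h.get('a');  // 1
h.get('h');  // 3
h.get('z');  // false (not in the table)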
Then, I read somewhere that JavaScript arrays are sometimes backed by an associative structure (rather than a flat block of memory) when sparsely filled, which could defeat the purpose of building my own hash structure. But I'm not sure. So, which approach would be more efficient, in terms of memory and speed?
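For what it's worth, the table my constructor builds is exactly that kind of sparse array: only keys.length of the max = 3 * keys.length slots are ever assigned. A minimal illustration (slot indices come from my code function above):

//With keys [['a',1],['k',2]], max = 6; only two of the six slots
//are ever assigned, so the engine might store tbl in a slower
//dictionary-like mode rather than as a flat array
var tbl = [];
tbl[1] = ['a', 1]; // (97 * 31) % 6 == 1
tbl[5] = ['k', 2]; // (107 * 31) % 6 == 5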