I have two arrays:
first arr - has multiple level structure.
second arr - linear array with objects.
In my update function I iterate throw my first array childrens and if condition true, I push objects into attributes.
In production - I have arr2.length - 150000 objects and arr1.length - multiple deep levels.
How I can optimize my function to loop more quickly with a big data? Now, I wait about 5 minutes on iteration.
var arr1 = [{
"item_id": 2,
"item_name": "test",
"children": [{
"item_id": 39646,
"item_name": "test1",
"children": [{
"item_id": 35648,
"item_name": "test2",
"children": [{
"item_id": 35771,
"item_name": "test3",
"children": [],
"attributes": []
}],
}]
}]
}]
var arr2 = [
{
"item_id": 35771,
"attr_value": "test",
}, {
"item_id": 35771,
"attr_value": "test1",
}
]
const update = (array, id, object) => array.forEach(o => o.item_id === id ?
o.attributes.push(object) : update(o.children, id, object)
);
for (let item of arr2) {
update(arr1, item.item_id, item);
}
console.log(arr1)
.some()doesn't make any sense..some()returns true when at least one of the elements of an array satisfies a given predicate. You're misusing it as a.forEach()o.O