Reputation: 237
I have two arrays: one holds data points and the other holds interval boundaries. Both are sorted, and their start and end values match. I go through nested for loops to compute the average of the data points in each interval, so I end up with one data value per interval. For smaller arrays (around 100-500 elements) these linear loops do the job, but the approach becomes a problem with several thousand data points. Any recommendation will be appreciated.
Please see the simplified code below; a link to a JSFiddle is at the end.
var TimelineArray = [0, 10, 20, 30, 40, 40, 60, 70, 80, 90, 100],
    DataArray = [0, 2, 4, 5, 8, 11, 19, 22, 24, 25, 30, 31, 38, 39, 51, 56, 57, 58, 59, 64, 74, 76, 89, 91, 92, 94, 98, 100],
    DataArrayA = [];

for (var i = 0; i < TimelineArray.length - 1; i++) {
  // collect the data points that fall into (TimelineArray[i], TimelineArray[i + 1]]
  var dataPointsInGivenTimeInterval = [];
  for (var j = 0; j < DataArray.length; j++) {
    if (DataArray[j] > TimelineArray[i] && DataArray[j] <= TimelineArray[i + 1]) {
      dataPointsInGivenTimeInterval.push(DataArray[j]);
    }
  }

  if (dataPointsInGivenTimeInterval.length == 0) {
    // no data points in this interval
    DataArrayA.push(null);
  } else {
    var sumOfdataPoints = 0;
    for (var k = 0; k < dataPointsInGivenTimeInterval.length; k++) {
      sumOfdataPoints += dataPointsInGivenTimeInterval[k];
    }
    var avg = sumOfdataPoints / dataPointsInGivenTimeInterval.length;
    DataArrayA.push(avg);
  }
} // end for
console.log(TimelineArray);
console.log(DataArrayA);
The console output is
[0, 10, 20, 30, 40, 40, 60, 70, 80, 90, 100]
[4.75, 15, 25.25, 36, null, 56.2, 64, 75, 89, 95]
Here is the code on JSFiddle: calculating average values for given intervals
Upvotes: 1
Views: 632
Reputation: 83709
Not sure if it'll be any faster, but here's a crack at it in a different way:
var TimelineArray = [0, 10, 20, 30, 40, 40, 60, 70, 80, 90, 100],
    DataArray = [0, 2, 4, 5, 8, 11, 19, 22, 24, 25, 30, 31, 38, 39, 51, 56, 57, 58, 59, 64, 74, 76, 89, 91, 92, 94, 98, 100],
    DataArrayA = [];

// Average of an array, or null if it is empty.
function avg(arr) {
  if (arr != null && arr.length > 0)
    return arr.reduce(function (a, b) { return a + b; }, 0) / arr.length;
  return null;
}

for (var i = 0; i < TimelineArray.length - 1; i++) {
  var interval = [TimelineArray[i], TimelineArray[i + 1]];
  // keep the data points in (interval[0], interval[1]]
  var data = DataArray.filter(function (a) { return a > interval[0] && a <= interval[1]; });
  DataArrayA.push(avg(data));
}

console.log(DataArrayA);
edit 1: removed a loop.
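One way to check whether it is actually faster is to time it on a larger, synthetic data set. A minimal sketch (buildSorted, the array sizes, and the value range are arbitrary choices; it reuses the avg() helper defined above):
function buildSorted(n, max) {
  var a = [];
  for (var i = 0; i < n; i++) a.push(Math.random() * max);
  return a.sort(function (x, y) { return x - y; });
}

var bigTimeline = buildSorted(1000, 100000);  // interval boundaries
var bigData = buildSorted(50000, 100000);     // data points

console.time('filter per interval');
var result = [];
for (var i = 0; i < bigTimeline.length - 1; i++) {
  var lo = bigTimeline[i], hi = bigTimeline[i + 1];
  result.push(avg(bigData.filter(function (a) { return a > lo && a <= hi; })));
}
console.timeEnd('filter per interval');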
Upvotes: 0
Reputation: 288120
Since the arrays are sorted, you can do it in a single pass, linear in the combined size of the timeline and the data; the data index j only ever moves forward:
var timeline = [0, 10, 20, 30, 40, 40, 60, 70, 80, 90, 100],
    data = [0, 2, 4, 5, 8, 11, 19, 22, 24, 25, 30, 31, 38, 39, 51, 56, 57, 58, 59, 64, 74, 76, 89, 91, 92, 94, 98, 100];

var averages = new Array(timeline.length - 1);
for (var i = 0, j = 0; i < timeline.length; i++) {
  var sum = 0,
      items = 0;
  // consume every data point up to and including the current boundary
  for (; data[j] <= timeline[i]; j++) {
    sum += data[j];
    ++items;
  }
  // skip the first boundary; an empty interval yields null, as in the question
  if (i) averages[i - 1] = items ? sum / items : null;
}
console.log(averages);
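As a quick sanity check, the result can be compared with the averages printed in the question (the expected array below is copied from the question's console output):
var expected = [4.75, 15, 25.25, 36, null, 56.2, 64, 75, 89, 95];
console.log(JSON.stringify(averages) === JSON.stringify(expected)); // true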
Upvotes: 2
Reputation: 6110
You don't need to re-scan DataArray from the beginning on each iteration: because both arrays are sorted, the position pos only ever moves forward.
var TimelineArray = [0, 10, 20, 30, 40, 40, 60, 70, 80, 90, 100];
var DataArray = [0, 2, 4, 5, 8, 11, 19, 22, 24, 25, 30, 31, 38, 39, 51, 56, 57, 58, 59, 64, 74, 76, 89, 91, 92, 94, 98, 100];

var res = [], pos = 0;
TimelineArray.forEach(function (v, i) {
  // sum the data points between the previous boundary and this one
  for (var sum = 0, n = 0; DataArray[pos] <= v; n++) {
    sum += DataArray[pos++];
  }
  // skip the first boundary; empty intervals yield null
  i && res.push(n ? sum / n : null);
});
console.log(res);
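For reuse, the same forward-scanning merge can be wrapped in a function (a sketch; the name intervalAverages and the explicit bounds check on pos are only illustrative):
function intervalAverages(timeline, data) {
  var res = [], pos = 0;
  timeline.forEach(function (v, i) {
    // stop at the end of the data as well as at the current boundary
    for (var sum = 0, n = 0; pos < data.length && data[pos] <= v; n++) {
      sum += data[pos++];
    }
    i && res.push(n ? sum / n : null);
  });
  return res;
}

console.log(intervalAverages(TimelineArray, DataArray));
// [4.75, 15, 25.25, 36, null, 56.2, 64, 75, 89, 95]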
Upvotes: 1