Jak obliczyć średnią tablicy w JavaScript
// Average of a numeric array.
// Fix: the original returned NaN for an empty array (0 / 0);
// an empty input now yields 0 instead.
const arrAvg = arr => arr.length === 0 ? 0 : arr.reduce((a, b) => a + b, 0) / arr.length;
TechWhizKid
// Average of a numeric array: total the elements, divide by the count.
// (An empty array produces NaN, since 0 / 0.)
const arrAvg = (arr) => {
  const sum = arr.reduce((acc, val) => acc + val, 0);
  return sum / arr.length;
};
// Average of a fixed list of scores: accumulate a running total,
// then divide by the element count.
const scores = [ 20, 50, 75, 100, 115 ];
let total = 0;
for (const score of scores) {
  total += score;
}
console.log( total / scores.length );
// Average of a numeric array.
// Fix: the original reduce had no initial value, so avg([]) threw
// "Reduce of empty array with no initial value". Seed the reduce
// with 0 and return 0 for an empty array.
const avg = arr => {
  if (arr.length === 0) return 0;
  const sum = arr.reduce((acc, cur) => acc + cur, 0);
  return sum / arr.length;
}
console.log(avg([1, 2, 3, 7, 8]));
// Total the elements of `array` (defined elsewhere), then average.
// The `|| 0` deliberately maps the NaN produced by an empty array
// (0 / 0) to 0 — do not replace it with `??`, which would pass NaN through.
var sum = 0;
for (const value of array) {
  sum += value;
}
var avg = (sum / array.length) || 0;
// Average of `grades` (defined elsewhere): sum all entries with a
// reduce, then divide by the count.
var total = grades.reduce((acc, grade) => acc + grade, 0);
var avg = total / grades.length;
// Average of the numbers passed as individual arguments (rest parameters).
// Fix: the original reduce had no initial value, so average() with zero
// arguments threw "Reduce of empty array with no initial value". Seed the
// reduce with 0 and return 0 when called with no arguments.
const average = (...args) =>
  args.length === 0 ? 0 : args.reduce((a, b) => a + b, 0) / args.length;
// Example
average(1, 2, 3, 4); // 2.5