Reputation: 229
I'm having trouble with matrix multiplication code in JavaScript. If I run the function below with the following two matrices:
var m1 = [ [ 1, 0, 0 ],
           [ 0, 1, 0 ],
           [ 1, 1, 0 ],
           [ 0, 0, 1 ],
           [ 1, 0, 1 ],
           [ 0, 1, 1 ],
           [ 1, 1, 1 ] ];
var m2 = [ [ '0', '1', '1', '0', '0', '1', '1' ] ];

var matrixMult = function (m1, m2) {
    console.log(m1);
    console.log(m2);
    console.log("m1 length: %d, m2[0].length: %d", m1.length, m2[0].length);
    if (m1.length != m2[0].length) {
        console.error("Incompatible matrix dimensions for multiplication.");
        return false;
    }
    var result = [];
    for (var i = 0; i < m1[0].length; i++) {
        result[i] = [];
        for (var j = 0; j < m2.length; j++) {
            var sum = 0;
            for (var k = 0; k < m1.length; k++) {
                sum += m1[i][k] * m2[k][j];
            }
            result[i][j] = sum;
        }
    }
    return result;
};
I get this error:
/path/to/file.js:58
sum += m1[i][k] * m2[k][j];
^
TypeError: Cannot read property '0' of undefined
at matrixMult (...)
What's going wrong? Could the issue be that m2.length is only 1?
Upvotes: 3
Views: 7016
Reputation: 40448
There is only an m2[0], but your innermost for loop runs k from 0 up to m1.length - 1, which is greater than 0. As soon as k reaches 1, m2[k] is undefined, and reading m2[1][j] throws that TypeError.
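To see it concretely, here is a minimal sketch using just your m2 from above, indexed the way the inner loop does it:

var m2 = [ [ '0', '1', '1', '0', '0', '1', '1' ] ];

console.log(m2.length);   // 1  -- there is only one row
console.log(m2[0][0]);    // '0' -- index 0 is fine
console.log(m2[1]);       // undefined -- there is no row 1
console.log(m2[1][0]);    // TypeError: Cannot read property '0' of undefined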
Also, by the definition of matrix multiplication:

Multiplication of two matrices is defined only if the number of columns of the left matrix is the same as the number of rows of the right matrix.

(Source: Wikipedia)

you cannot multiply your sample matrices as m1 * m2, because m1 has 3 columns but m2 has only one row.
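If you want to guard against that before multiplying, a minimal sketch of such a check could look like this (canMultiply is just a name I made up for illustration):

function canMultiply(a, b) {
    // For a * b, the number of columns of a must equal the number of rows of b.
    return a[0].length === b.length;
}

canMultiply(m1, m2); // false -- m1 has 3 columns, m2 has 1 row
canMultiply(m2, m1); // true  -- m2 has 7 columns, m1 has 7 rows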
EDIT
Now that I've understood your question correctly, I wrote a little function that might help you out:
function multiplyMatrix(m1, m2) {
    var result = [];
    // Iterate over the rows of m2 and the columns of m1, so this
    // computes the product m2 * m1 (here: 1x7 times 7x3 = 1x3).
    for (var j = 0; j < m2.length; j++) {
        result[j] = [];
        for (var k = 0; k < m1[0].length; k++) {
            // Dot product of row j of m2 with column k of m1.
            // The string entries of m2 are coerced to numbers by *.
            var sum = 0;
            for (var i = 0; i < m1.length; i++) {
                sum += m1[i][k] * m2[j][i];
            }
            result[j].push(sum);
        }
    }
    return result;
}
multiplyMatrix(m1, m2);
// => [ [2, 4, 2] ]
Upvotes: 8