Reputation: 1161
I have used connect-busboy to get the uploaded file, but my script doesn't emit the file event. I am using Angular on the client side.
I am using the following code:
req.busboy.on('error', function (fieldname, file, filename, encoding, mimetype) {
    console.log(fieldname);
});
req.busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    // set the AWS headers
    var awsHeader = {
        Bucket: 'Mybucket',
        Key: 'MyImage',
        ContentType: mimetype,
        ACL: 'public-read'
    };
    file.uploadFile(file, awsHeader, function (err, res) {
        console.log(err);
        console.log(res);
    });
});
req.busboy.on('finish', function () {
    res.send({status: true, message: "File uploaded successfully."});
});
req.busboy.on('field', function (key, value, keyTruncated, valueTruncated) {
    console.log(key);
});
req.pipe(req.busboy);
The finish event gets fired every time I try to upload a file.
Upvotes: 0
Views: 458
Reputation: 1744
It looks like you aren't building your file from the multipart form. Before the code below, I use the Node aws-sdk to build an s3Bucket object that I can PUT to.
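As a rough sketch of that setup (not part of the original answer), the s3Bucket object can be created with an aws-sdk v2 client bound to a bucket; the region and bucket name below are placeholder assumptions:

var AWS = require('aws-sdk');

// Placeholder region and bucket name; substitute your own values.
AWS.config.update({region: 'us-east-1'});
var s3Bucket = new AWS.S3({
    params: {Bucket: 'my-bucket'}
});

With an s3Bucket client like that in scope, the file handler itself looks like this: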
req.busboy.on('file', function (fieldname, file, filename, encoding, mimetype) {
    if (!filename) {
        // If filename is not truthy it means there's no file
        return res.status(400).send({error: 'no file'});
    }
    // Create the initial array containing the stream's chunks
    file.fileRead = [];
    file.on('data', function (chunk) {
        // Push chunks into the fileRead array
        this.fileRead.push(chunk);
        // You can use this if you want to limit file size on ingest
        /*if (this.fileRead.length > 5500000) {
            return res.status(500).send({error: 'file too large - 5MB max'});
        }*/
    });
    file.on('error', function (err) {
        console.log('Error while buffering the stream: ', err);
    });
    file.on('end', function () {
        // Concat the chunks into a Buffer
        var finalBuffer = Buffer.concat(this.fileRead);
        // Make sure req.files exists before assigning to it
        req.files = req.files || {};
        req.files[fieldname] = {
            buffer: finalBuffer,
            size: finalBuffer.length,
            filename: filename,
            mimetype: mimetype
        };
        var data = {
            Key: 'users/' + req.body.id + '/image/' + req.body.id,
            Body: req.files[fieldname].buffer,
            ACL: 'public-read'
        };
        // I'm not sure how you're uploading to S3, but I use this
        s3Bucket.putObject(data, function (err, data) {
            if (err) {
                console.log(err);
                return res.status(400).send('error during upload');
            } else {
                // success: respond to the client here
            }
        });
    });
});
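Note that this handler assumes the connect-busboy middleware is registered and that the request is piped into busboy inside the route, as in the question. A minimal Express wiring sketch (the /upload path, the size limit, and the req.files initialization are assumptions, not part of the answer) might look like:

var express = require('express');
var busboy = require('connect-busboy');

var app = express();
// Optional: cap uploads at roughly 5 MB at the busboy level.
app.use(busboy({limits: {fileSize: 5 * 1024 * 1024}}));

app.post('/upload', function (req, res) {
    req.files = req.files || {};
    // Attach the 'file', 'field', and 'finish' handlers shown above,
    // then pipe the request into busboy so those events fire.
    req.pipe(req.busboy);
});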
Upvotes: 1