Reputation: 203
/**
 * Express handler: uploads the file attached to the request (multer
 * `req.file`) to S3 and stores the resulting public URL on the matching
 * guide_media row.
 *
 * Expects:
 *   - req.file    - multer upload descriptor (the 'file' form field)
 *   - req.body.id - _id of the guide_media row to update
 *
 * Responds 400 when either input is missing, 404 when the row does not
 * exist, and 200 with the updated row(s) on success. All other failures
 * are delegated to the Express error handler via next(err).
 */
function uploadGuideMedia(req, res, next) {
  // Both the file and the row id are required. The original `&&` check let a
  // request with an id but no file through, then crashed dereferencing
  // req.file below. Also: missing input is a bad request (400), not an
  // authentication failure (401).
  if (!req.file || !req.body.id) {
    return res.status(400).send({
      status: 400,
      data: "Upload a file with 'file' key"
    });
  }
  db.query('SELECT * from guide_media where _id = $1', [req.body.id], function(err, guide) {
    if (err) return next(err);
    if (guide.rows.length === 0) {
      return res.status(404).send({
        status: 404,
        data: "Guide_media not found"
      });
    }
    // Re-append the original extension (multer's temp name has none).
    let name = req.file.path + path.extname(req.file.originalname);
    fs.renameSync(req.file.path, name);
    req.file.path = name;
    var s3bucket = new AWS.S3({
      params: {
        Bucket: 'mapery-v2'
      }
    });
    fs.readFile(req.file.path, function(err, lq_file) {
      // Previously swallowed: an unreadable temp file silently uploaded an
      // undefined body.
      if (err) return next(err);
      // NOTE(review): calling createBucket on every upload is redundant once
      // the bucket exists; kept to preserve the original flow — confirm
      // whether it can be dropped.
      s3bucket.createBucket(function() {
        var params = {
          Key: 'upload-v2/' + req.file.originalname,
          Body: lq_file,
          ContentType: req.file.mimetype,
          ACL: 'public-read'
        };
        s3bucket.upload(params, function(err, aws_images) {
          // Previously swallowed: a failed upload crashed on
          // aws_images.Location below.
          if (err) return next(err);
          // Best-effort cleanup of the temp file; a failed unlink should not
          // abort the request, so its error is deliberately ignored.
          fs.unlink(req.file.path, function() {
            db.query('UPDATE guide_media SET image_path = $1 WHERE _id = $2 RETURNING *', [aws_images.Location, req.body.id], function(err, guide_res) {
              if (err) return next(err);
              return res.status(200).send({
                status: 'success',
                data: guide_res.rows
              });
            });
          });
        });
      });
    });
  });
}
Upvotes: 3
Views: 4306
Reputation: 872
Uploading in parts doesn't mean your file will be stored on S3 in multiple parts. If the payload is large enough, the SDK splits the upload into multiple parts and sends them concurrently, so a failed part can be retried without re-sending the whole file. By default, the SDK uses a part size of 5 MB. You can set the part size to 10 MB using the following code.
// Same upload call with the part size raised to 10 MB and a single part
// in flight at a time (queueSize: 1); params and options passed inline.
s3.upload(
  { Bucket: 'bucket', Key: 'key', Body: stream },
  { partSize: 10 * 1024 * 1024, queueSize: 1 },
  function(err, data) {
    console.log(err, data);
  }
);
EDIT: Link to the documentation for the upload method. http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
Upvotes: 4