HireLee

Reputation: 573

NodeJS - Manipulate Image before Sending to AWS S3

OK, this has somewhat perplexed me today; I'm sure there's a simple solution. The problem is manipulating an image before sending it to S3. This is how I manipulated images before saving, back before I was using AWS:

    // Dependencies
    var formidable = require('formidable');
    var path = require('path');
    var fse = require('fs-extra'); // assuming fse refers to fs-extra
    var lwip = require('lwip');

    // Form
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
        res.writeHead(200, {'content-type': 'text/plain'});
    });
    // Form On
    form.on('end', function(fields, files) {
        /* Temporary location of our uploaded file */
        var temp_path = this.openedFiles[0].path;
        /* The file name of the uploaded file */
        var file_name = this.openedFiles[0].name;
        /* Location where we want to copy the uploaded file */
        var new_location = 'public/assets/uploads/blog/';
        // Rename Image
        var t = path.extname(file_name);
        var n = Math.floor(new Date() / 1000);
        // Copy Image
        fse.copy(temp_path, new_location+n+t, function(err) {  
            if (err) {
                console.error(err);
            } else {

                //
                // Resize and Blur 
                //

                lwip.open(new_location+n+t, function(err, image) {
                    if (err) { console.error(err); return res.end(); }
                    image.batch()
                        //.scale(0.75)          // scale to 75%
                        //.rotate(45, 'white')  // rotate 45degs clockwise (white fill)
                        //.crop(200, 200)       // crop a 200X200 square from center
                        .blur(7)                // Gaussian blur with SD=7
                        .writeFile('public/assets/uploads/blog/blur/'+n+t, function(err){
                        // check err...
                        if(err) {
                            console.log(err);
                            return res.end();
                        }
                        // done.
                        console.log('Success');
                        // Send Data
                        res.write(n+t);
                        res.end();
                    });
                });
            }
        });
    });

Pretty simple stuff, right? All I'm doing is using formidable to process the incoming form, then using 'lwip' to resize (if desired) and blur a copy of the image, placing it in a new directory.

So how could I go about using 'lwip' to resize and blur the image before sending that data off to AWS S3?

Upvotes: 0

Views: 1574

Answers (1)

HireLee

Reputation: 573

OK, so I'm answering my own question, which is cool. This is what I came up with, and it worked both locally and on Heroku. I used the module s3-uploader.

// Dependencies
var s3 = require('s3'); // the client API below matches the "s3" npm module
var formidable = require('formidable');
var path = require('path');
var fse = require('fs-extra'); // assuming fse refers to fs-extra
var fs = require('fs');

var client = s3.createClient({
    maxAsyncS3: 20,     // this is the default 
    s3RetryCount: 0,    // this is the default 
    s3RetryDelay: 1000, // this is the default 
    multipartUploadThreshold: 20971520, 
    multipartUploadSize: 15728640, 
    s3Options: {
        accessKeyId: "key",
        secretAccessKey: "secret",
    },
});
exports.s3 = function(req, res, next) {

    // Formidable
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files) {
       res.writeHead(200, {'content-type': 'text/plain'});
    });


    form.on('end', function(fields, files) {
       /* Temporary location of our uploaded file */
       var temp_path = this.openedFiles[0].path;
       /* The file name of the uploaded file */
       var file_name = this.openedFiles[0].name;
       /* Location where we want to copy the uploaded file */
       var new_location = 'public/assets/uploads/s3/';
       // Rename Image
       var e = path.extname(file_name);
       var n = Math.floor(new Date() / 1000);

    // Copy Image 

    fse.copy(temp_path, new_location+n+e, function(err) {  

        // AWS Params
        var params = {
          localFile:  new_location+n+e,
          s3Params: {
            Bucket: "hirelee-uploads",
            Key: "blog/"+n+e,
          },
        };
        // AWS Upload
        var uploader = client.uploadFile(params);
        uploader.on('error', function(err) {
          console.error("unable to upload:", err.stack);
        });
        uploader.on('end', function() {

            // Blur Copied Image

            require('lwip').open(params.localFile, function(err, image) {
                if (err) { console.error(err); return res.end(); }
                image.batch()
                    .blur(7)                
                    .writeFile("public/assets/uploads/s3/blur-"+n+e, function(err){
                    // check err...
                    if(err) {
                        console.log(err);
                    } else { 

                        // AWS Upload Blur

                        var params = {
                            localFile: "public/assets/uploads/s3/blur-"+n+e,

                            s3Params: {
                                Bucket: "hirelee-uploads",
                                Key: "blog/blur/"+n+e,
                            },
                        };

                        var uploader = client.uploadFile(params);

                        uploader.on('error', function(err) {
                            console.error("unable to upload:", err.stack);
                        });

                        // Finished AWS upload
                        uploader.on('end', function() {
                            console.log("done uploading");

                            // Delete Copied Images on Disk
                            fs.unlinkSync("public/assets/uploads/s3/"+n+e);
                            fs.unlinkSync("public/assets/uploads/s3/blur-"+n+e);

                            res.end();
                        });
                    }
                });
            });

        });

    }); // end fse.copy
    }); // end form.on('end')
};

If anyone can give me a shout about anything I've missed, or anything that would make the above more efficient, that would be great.
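One thing that might tighten it up (just a sketch, untested here): lwip can hand the blurred image back as a Buffer via toBuffer, and the official aws-sdk module's putObject accepts a Buffer as Body, so the blurred copy never has to be written to disk at all. The bucket name, key pattern and blur radius below simply mirror the values above, and blurAndUpload is a made-up helper name.

    // Sketch: blur in memory and upload the buffer directly with aws-sdk,
    // instead of writing "blur-"+n+e to disk and then uploading that file.
    var AWS = require('aws-sdk');
    var lwip = require('lwip');

    var awsS3 = new AWS.S3({
        accessKeyId: "key",         // same placeholder credentials as above
        secretAccessKey: "secret"
    });

    // Hypothetical helper: localFile is the copied upload on disk,
    // key is e.g. "blog/blur/"+n+e
    function blurAndUpload(localFile, key, callback) {
        lwip.open(localFile, function(err, image) {
            if (err) return callback(err);
            image.batch()
                .blur(7)            // same Gaussian blur as above
                .toBuffer('jpg', {quality: 90}, function(err, buffer) {
                    if (err) return callback(err);
                    awsS3.putObject({
                        Bucket: "hirelee-uploads",
                        Key: key,
                        Body: buffer
                    }, callback);
                });
        });
    }

Called in place of the second lwip/writeFile/uploadFile block, that would leave only the original copied image to unlink once the callback fires.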

Upvotes: 2
