Becca

Reputation: 189

SFTP createReadStream not writing into s3 bucket

I'm trying to read a file from an SFTP server and stream it into an S3 bucket, but the upload never happens. Yes, the file path is exactly correct. I'm not sure what I'm doing wrong: when I run the code, it doesn't even seem to attempt the upload, because none of the upload console logs appear.

const aws = require('aws-sdk');
const s3 = new aws.S3();
const Client = require('ssh2').Client;
const conn = new Client();

const connSettings = {
    host: event.serverHost,
    port: event.port,
    username: event.username,
    password: event.password
};

exports.handler = function(event) {
    conn.on('ready', function() {
        conn.sftp(function(err, sftp) {
            if (err) {
                console.log("Error in connection", err);
                conn.end()
            } else {
                console.log("Connection established");

                let readStream = sftp.createReadStream(remoteFilePath);
                console.log(`Read Stream ${readStream}`);
                // readStream outputs [object Object] to the console

                const uploadParams = {
                    Bucket: s3Bucket,
                    Key: 'fileName',
                    Body: readStream
                }

                s3.upload(uploadParams, function (err, data) {
                    if (err) {
                        console.log("Error", err);
                    }
                    if (data) {
                        console.log("Upload Success", data.Location);
                    }
                });

                conn.end()   
            }
        });
    }).connect(connSettings);
}

I want to be able to stream the file from the SFTP server into the S3 bucket.

Upvotes: 1

Views: 1197

Answers (2)

mscdex

Reputation: 106696

conn.end() ends the connection immediately. Move that to inside your s3.upload() callback so that your data actually gets transferred before the connection is closed.
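
A minimal sketch of that change, reusing the uploadParams and conn from the question:

s3.upload(uploadParams, function (err, data) {
    if (err) {
        console.log("Error", err);
    }
    if (data) {
        console.log("Upload Success", data.Location);
    }
    // Close the SSH connection only once the upload has finished
    conn.end();
});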

Upvotes: 2

amoebob

Reputation: 301

This is a working Node 12 example of what I believe you are trying to accomplish:

const aws = require('aws-sdk');
const s3 = new aws.S3();
const Client = require('ssh2').Client;
const conn = new Client();
const { PassThrough } = require('stream');

conn.on('ready', () => {

    conn.sftp((err, sftp) => {

        if (err) {
            console.log(`SFTP error: ${err}`);
            conn.end();
            return;
        }

        const transferStream = new PassThrough();

        s3.upload({
            Bucket: s3Bucket,
            Key: 'test_file.txt',
            Body: transferStream
        }, (err, data) => {
            if (err) {
                console.log(`Upload error: ${err}`);
            }
            if (data) {
                console.log(`Uploaded to [${data.Location}].`);
            }
        });

        sftp.createReadStream(remoteFilePath)
            .pipe(transferStream)
            .on('end', () => {
                transferStream.end();
                conn.end();
            });

    });

}).connect(connectionSettings);
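
Note the role of the PassThrough stream here: it decouples the SFTP read from the S3 upload, so s3.upload() can start consuming the Body while the SFTP data is still arriving, and the 'end' handler ensures conn.end() only runs after the stream has been fully drained.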

Upvotes: 1
