denisgmag

Reputation: 81

How can I upload an FTP file to Firebase Storage using Cloud Functions for Firebase?

Within the same Firebase project, and using a Cloud Function (written in Node.js), I first download an FTP file (using the npm ftp module) and then try to upload it into Firebase Storage.

Every attempt has failed so far and the documentation doesn't help. Any expert advice/tips would be greatly appreciated.

The following code uses two different approaches: fs.createWriteStream() and bucket.file().createWriteStream(). Both fail, but for different reasons (see the error messages in the code).

'use strict'

// [START import]
let admin = require('firebase-admin')
let functions = require('firebase-functions')
const gcpStorage = require('@google-cloud/storage')()
admin.initializeApp(functions.config().firebase)
var FtpClient = require('ftp')
var fs = require('fs')
// [END import]

// [START Configs]
// Firebase Storage is configured with the following rules, which grant read/write access to everyone
/*
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write;
    }
  }
}
*/
// Replace this with your project id; it will be used by: const bucket = gcpStorage.bucket(firebaseProjectID)
const firebaseProjectID = 'your_project_id'
// Public FTP server; uploaded files are removed after 48 hours! Upload new ones when needed for testing
const CONFIG = {
  test_ftp: {
    source_path: '/48_hour',
    ftp: {
      host: 'ftp.uconn.edu'
    }
  }
}
const SOURCE_FTP = CONFIG.test_ftp
// [END Configs]

// [START saveFTPFileWithFSCreateWriteStream]
function saveFTPFileWithFSCreateWriteStream(file_name) {
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      stream.pipe(fs.createWriteStream(file_name))
      console.log('File downloaded: ', file_name)
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}
// This fails with the following error in firebase console:
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
// [END saveFTPFileWithFSCreateWriteStream]

// [START saveFTPFileWithBucketUpload]
function saveFTPFileWithBucketUpload(file_name) {
  const bucket = gcpStorage.bucket(firebaseProjectID)
  const file = bucket.file(file_name)
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      stream.pipe(file.createWriteStream())
      console.log('File downloaded: ', file_name)
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}
// [END saveFTPFileWithBucketUpload]

// [START database triggers]
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream
// function to save the file in the default project storage bucket
exports.dbTriggersFSCreateWriteStream = functions.database
  .ref('/ftp_fs_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // e.g. trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithFSCreateWriteStream(fileName)
    // This fails with the following error in firebase console:
    // Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
  })
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload
// function to save the file in the default project storage bucket
exports.dbTriggersBucketUpload = functions.database
  .ref('/ftp_bucket_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // e.g. trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithBucketUpload(fileName)
    // This fails with the following error in firebase console:
    /*
    Error: Uncaught, unspecified "error" event. ([object Object])
    at Pumpify.emit (events.js:163:17)
    at Pumpify.onerror (_stream_readable.js:579:12)
    at emitOne (events.js:96:13)
    at Pumpify.emit (events.js:188:7)
    at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15)
    at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10
    at _combinedTickCallback (internal/process/next_tick.js:67:7)
    at process._tickDomainCallback (internal/process/next_tick.js:122:9)
    */
  })
// [END database triggers]
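
For testing, such a trigger can be fired by pushing a record under the watched path, for example via the Admin SDK (a sketch; the file name follows the test FTP server's 48-hour naming used above):

// Fires dbTriggersBucketUpload by writing a new record under /ftp_bucket_triggers
admin.database().ref('/ftp_bucket_triggers').push({
  file_name: '20170601.tar.gz'
})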

Upvotes: 3

Views: 8785

Answers (1)

denisgmag

Reputation: 81

I've finally found the correct way to implement this.

1) Make sure the bucket is correctly referenced. Initially I had just used my project_id without the '.appspot.com' at the end.

const bucket = gcpStorage.bucket('<project_id>.appspot.com')
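
As an aside, the default bucket name does not have to be hardcoded: with the pre-1.0 firebase-functions SDK used in the question, the same config object that is passed to admin.initializeApp() exposes it (a sketch, assuming that SDK generation):

const functions = require('firebase-functions')
const gcpStorage = require('@google-cloud/storage')()

// functions.config().firebase includes storageBucket, i.e. '<project_id>.appspot.com'
const bucket = gcpStorage.bucket(functions.config().firebase.storageBucket)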

2) Create the bucket write stream first, then pipe the stream from the FTP get call into it. Note that fileName will be the name of the saved file (the file does not have to exist beforehand).

ftpSource.get(filePath, function(err, stream) {
  if (err) throw err
  stream.once('close', function() { ftpSource.end() })

  // This didn't work!
  //stream.pipe(fs.createWriteStream(fileName))
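
  // (Alternative sketch, not from the original answer: fs.createWriteStream()
  // raised EROFS because only /tmp is writable in Cloud Functions. Staging
  // the download there and uploading the local copy should also work:)
  // const localPath = require('path').join(require('os').tmpdir(), fileName)
  // stream.pipe(fs.createWriteStream(localPath))
  //   .on('finish', () => bucket.upload(localPath))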

  // This works...
  let bucketWriteStream = bucket.file(fileName).createWriteStream()
  stream.pipe(bucketWriteStream)
})

Et voilà, works like a charm...
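
A fuller sketch along the same lines (illustrative, not the exact code above): resolving on the bucket write stream's 'finish' event lets the database trigger return a promise, so Cloud Functions keeps the instance alive until the upload has fully completed.

function saveFTPFileToBucket(fileName) {
  const bucket = gcpStorage.bucket('<project_id>.appspot.com')
  const ftpSource = new FtpClient()
  return new Promise((resolve, reject) => {
    ftpSource.on('error', reject)
    ftpSource.on('ready', function() {
      ftpSource.get(SOURCE_FTP.source_path + '/' + fileName, function(err, stream) {
        if (err) return reject(err)
        stream.once('close', function() { ftpSource.end() })
        // pipe() returns its destination, so the GCS write stream's events
        // can be chained directly
        stream.pipe(bucket.file(fileName).createWriteStream())
          .on('error', reject)
          .on('finish', resolve) // fires once the object is fully written
      })
    })
    ftpSource.connect(SOURCE_FTP.ftp)
  })
}

Returning this promise from the onWrite handler (return saveFTPFileToBucket(fileName)) keeps the function alive until the pipe finishes.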

Upvotes: 4
