2017-06-14 75 views
-1

在同一个firebase项目中,使用云功能(用node.js编写),首先下载一个FTP文件(使用npm ftp模块),然后尝试将其上传到firebase存储。如何使用Cloud Functions for Firebase将FTP文件上传到Firebase存储?

到目前为止,每一次尝试都失败了,文档没有帮助...任何专家意见和建议将不胜感激?

以下代码使用两种不同的方法:fs.createWriteStream()和bucket.file()。createWriteStream()。两者都失败,但由于不同的原因(请参阅代码中的错误消息)。

'use strict' 

// [START import] 
let admin = require('firebase-admin') 
let functions = require('firebase-functions') 
const gcpStorage = require('@google-cloud/storage')() 
admin.initializeApp(functions.config().firebase)  
var FtpClient = require('ftp') 
var fs = require('fs') 
// [END import] 

// [START Configs] 
// Firebase Storage is configured with the following rules, granting read/write access to everyone:
/* 
service firebase.storage { 
    match /b/{bucket}/o { 
    match /{allPaths=**} { 
     allow read, write; 
    } 
    } 
} 
*/ 
// Replace this with your project id; used by: const bucket = gcpStorage.bucket(firebaseProjectID)
// NOTE(review): the default Storage bucket is named '<project_id>.appspot.com' —
// passing the bare project id to gcpStorage.bucket() fails (see the accepted answer below).
const firebaseProjectID = 'your_project_id' 
// Public FTP test server; uploaded files are removed after 48 hours — upload new ones when needed for testing.
const CONFIG = { 
    test_ftp: { 
    source_path: '/48_hour', 
    ftp: { 
     host: 'ftp.uconn.edu' 
    } 
    } 
} 
const SOURCE_FTP = CONFIG.test_ftp 
// [END Configs] 

// [START saveFTPFileWithFSCreateWriteStream] 
/**
 * Downloads a file from the configured FTP server and writes it to the
 * local filesystem.
 *
 * In the Cloud Functions runtime the deployment directory is read-only
 * (writing to the CWD produced the EROFS error noted below); the only
 * writable location is the temp directory, so the file is written to
 * os.tmpdir() instead.
 *
 * @param {string} file_name - Name of the file under SOURCE_FTP.source_path.
 */
function saveFTPFileWithFSCreateWriteStream(file_name) { 
    // Local requires keep this fix self-contained in this function.
    const os = require('os') 
    const path = require('path') 
    const localPath = path.join(os.tmpdir(), file_name) 
    const ftpSource = new FtpClient() 
    ftpSource.on('ready', function() { 
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) { 
     if (err) throw err 
     stream.once('close', function() { 
      ftpSource.end() 
      // Log only once the download stream has actually completed.
      console.log('File downloaded: ', localPath) 
     }) 
     stream.pipe(fs.createWriteStream(localPath)) 
    }) 
    }) 
    ftpSource.connect(SOURCE_FTP.ftp) 
} 
// The original version wrote to the current directory and failed with: 
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native) 
// [END saveFTPFileWithFSCreateWriteStream] 

// [START saveFTPFileWithBucketUpload]  
/**
 * Downloads a file from the configured FTP server and streams it directly
 * into the project's default Firebase Storage bucket.
 *
 * The default bucket is named '<project_id>.appspot.com'; referencing it by
 * the bare project id made the upload stream emit the opaque duplexify
 * error documented in the trigger below (see the accepted answer).
 *
 * @param {string} file_name - Name of the file under SOURCE_FTP.source_path;
 *                             also used as the destination object name.
 * @returns {Promise<void>} Resolves when the upload finishes, rejects on any
 *                          FTP or upload-stream error — so the Cloud Function
 *                          that returns this promise waits for completion.
 */
function saveFTPFileWithBucketUpload(file_name) { 
    // The default Firebase Storage bucket is '<project_id>.appspot.com',
    // not the bare project id.
    const bucket = gcpStorage.bucket(firebaseProjectID + '.appspot.com') 
    const file = bucket.file(file_name) 
    return new Promise(function(resolve, reject) { 
    const ftpSource = new FtpClient() 
    ftpSource.on('error', reject) 
    ftpSource.on('ready', function() { 
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) { 
     if (err) { ftpSource.end(); return reject(err) } 
     stream.once('close', function() { ftpSource.end() }) 
     stream.pipe(file.createWriteStream()) 
      .on('error', reject) 
      .on('finish', function() { 
       // Log only once the upload has actually completed.
       console.log('File downloaded: ', file_name) 
       resolve() 
      }) 
    }) 
    }) 
    ftpSource.connect(SOURCE_FTP.ftp) 
    }) 
}  
// [END saveFTPFileWithBucketUpload] 

// [START database triggers] 
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream 
// function to save the file in the default project storage bucket 
exports.dbTriggersFSCreateWriteStream = functions.database 
    .ref('/ftp_fs_triggers/{pushId}') 
    .onWrite(event => { 
    const trigger = event.data.val() 
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz' 
    return saveFTPFileWithFSCreateWriteStream(trigger.file_name) 
    // This fails with the following error in firebase console: 
    // Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native) 
    }) 
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload 
// function to save the file in the default project storage bucket 
exports.dbTriggersBucketUpload = functions.database 
    .ref('/ftp_bucket_triggers/{pushId}') 
    .onWrite(event => { 
    const trigger = event.data.val() 
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz' 
    return saveFTPFileWithBucketUpload(trigger.file_name) 
    // This fails with the following error in firebase console: 
    /* 
    Error: Uncaught, unspecified "error" event. ([object Object]) 
    at Pumpify.emit (events.js:163:17) 
    at Pumpify.onerror (_stream_readable.js:579:12) 
    at emitOne (events.js:96:13) 
    at Pumpify.emit (events.js:188:7) 
    at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15) 
    at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10 
    at _combinedTickCallback (internal/process/next_tick.js:67:7) 
    at process._tickDomainCallback (internal/process/next_tick.js:122:9) 
    */ 
    }) 
// [END database triggers] 
+1

请编辑您的问题,包括你的函数相关的代码。 –

+0

对不起,我已经添加了代码,我测试了两种不同的方法,但没有成功。 – denisgmag

回答

1

我终于找到了实现这个的正确方法。

1)确保存储桶被正确引用。最初,我只使用了我的 project_id,而末尾没有加上 '.appspot.com'。

const bucket = gsc.bucket('<project_id>.appspot.com') 

2)首先创建一个存储桶流,然后通过管道将来自FTP get调用的流传送到bucketWriteStream。请注意,file_name将是保存文件的名称(该文件不必事先存在)。

ftpSource.get(filePath, function(err, stream) { 
    if (err) throw err 
    // Close the FTP connection once the download stream ends.
    stream.once('close', function() { ftpSource.end() }) 

    // This didn't work (the Cloud Functions filesystem is read-only):
    //stream.pipe(fs.createWriteStream(fileName)) 

    // This works: open a write stream on the bucket object (fileName is the
    // destination object name; it need not exist beforehand) and pipe the
    // FTP download stream straight into it.
    let bucketWriteStream = bucket.file(fileName).createWriteStream() 
    stream.pipe(bucketWriteStream) 
}) 

瞧,就这样顺利运行了,一切如魔法般奏效……

相关问题