Reading a file from an SFTP server using Node.js and SSH2

I have a rather strange problem reading streams in Node.js. I use SSH2 to create an SFTP connection between me and the SFTP server. Then I try to create a read stream from the SFTP stream. In the read stream's "data" event handler, I push each chunk onto an array. When the "close" event of the read stream occurs, I call Buffer.concat to concatenate all the pieces of data that I received into one buffer. This is the same method described in other questions asked here on Stack Overflow, for example this one. However, I cannot use the data I retrieve. It looks like the buffer is 32 bytes smaller than the file I'm trying to retrieve (going by the length of the extracted data). Could this have anything to do with my SFTP connection? Or with how I create my read stream?

If it matters, the file is of type zip. When I try to unzip a file (in Node.js and manually) after reading it to the buffer, it does not work.

After the study, it turned out that:

  • When I list the file with readdir, the reported file size is correct.
  • Using FTP (JSFTP) against my development FTP server is working fine using the same method above.

Any advice is appreciated!

Here is my code:

// Download a remote file over SFTP into an in-memory buffer using ssh2.
var Client = require('ssh2').Client;

var m_ssh2Credentials = {
    host: config.ftpHostName,
    port: config.ftpPort,
    username: config.ftpUser,
    password: config.ftpPassword,
    readyTimeout: 20000,
    // Legacy ciphers kept for compatibility with the target server.
    algorithms: { cipher: ["3des-cbc", "aes256-cbc", "aes192-cbc", "aes128-cbc"] }
};

// ...

var conn = new Client();
conn.on('ready', function() {
    conn.sftp(function(err, sftp) {
        if (err) {
            writeToErrorLog("downloadFile(): Failed to open SFTP connection.");
            // BUG FIX: the original fell through and kept using the undefined
            // `sftp` session after a failed open.
            conn.end();
            return;
        }
        writeToLog("downloadFile(): Opened SFTP connection.");

        var data = [];       // collected chunks (never declared in the original)
        var dataLength = 0;  // running byte count (outer duplicate removed)
        var streamErr = "";

        var stream = sftp.createReadStream(config.ftpPath + "/" + m_fileName);
        // BUG FIX: the original ended the 'data' registration with `;` and then
        // chained `.on('error', ...)` onto the statement terminator — a syntax
        // error. All three handlers are now chained off `stream`.
        stream.on('data', function(d) {
            data.push(d);
            dataLength += d.length;
        }).on('error', function(e) {
            streamErr = e;
        }).on('close', function() {
            if (streamErr) {
                writeToErrorLog("downloadFile(): Error retrieving the file: " + streamErr);
            } else {
                writeToLog("downloadFile(): No error using read stream.");
                m_fileBuffer = Buffer.concat(data, dataLength);
                writeToLog("Data length: " + dataLength);
                writeToLog("downloadFile(): File saved to buffer.");
            }
            conn.end();
        });
    });
}).on('error', function(err) {
    writeToErrorLog("downloadFile(): Error connecting: " + err);
}).connect(m_ssh2Credentials);
+5
source share
2 answers

So, after a lot of investigation, I finally realized that something was wrong with the last chunks of data emitted by the "data" event. As far as I can tell, this looks like a bug in the read stream implementation. I was able to work around the problem using the lower-level functions (open, fstat, read) of the SSH2 library. This solution works for me. I wanted to share the solution in case someone else faces the same problem.

Work code:

 sftp.open(config.ftpPath + "/" + m_fileName, "r", function(err, fd) { sftp.fstat(fd, function(err, stats) { var bufferSize = stats.size, chunkSize = 16384, buffer = new Buffer(bufferSize), bytesRead = 0, errorOccured = false; while (bytesRead < bufferSize && !errorOccured) { if ((bytesRead + chunkSize) > bufferSize) { chunkSize = (bufferSize - bytesRead); } sftp.read(fd, buffer, bytesRead, chunkSize, bytesRead, callbackFunc); bytesRead += chunkSize; } var totalBytesRead = 0; function callbackFunc(err, bytesRead, buf, pos) { if(err) { writeToErrorLog("downloadFile(): Error retrieving the file."); errorOccured = true; sftp.close(fd); } totalBytesRead += bytesRead; data.push(buf); if(totalBytesRead === bufferSize) { m_fileBuffer = Buffer.concat(data); writeToLog("downloadFile(): File saved to buffer."); sftp.close(fd); m_eventEmitter.emit('downloadFile_Complete'); } } }); 
+7
source

If the exact byte size (or chunk size) doesn't matter to you and you just need to get the file, there is a much better and faster way (yes... the Node.js way!). This is what I use to copy a file:

 function getFile(remoteFile, localFile) { conn.on('ready', function () { conn.sftp(function (err, sftp) { if (err) throw err; var rstream = sftp.createReadStream(remoteFile); var wstream = fs.createWriteStream(localFile); rstream.pipe(wstream); rstream.on('error', function (err) { // To handle remote file issues console.log(err.message); conn.end(); rstream.destroy(); wstream.destroy(); }); rstream.on('end', function () { conn.end(); }); wstream.on('finish', function () { console.log(`${remoteFile} has successfully download to ${localFile}!`); }); }); }).connect(m_ssh2Credentials); } 

Alternatively, you can also try sftp.fastGet() , which uses parallel reads for quick access to the file. fastGet() provides you with a way to show the download progress (if required), in addition to providing the ability to adjust the number of concurrent reads and the block size. To learn more, open this SFTPStream document and find fastGet .

Here is a very quick code:

 sftp.fastGet(remoteFile, localFile, function (err) { if (err) throw err; console.log(`${remoteFile} has successfully download to ${localFile}!`); } 

Hope it helps!

0
source

All Articles