JavaScript AWS SDK S3 upload method with Body stream generating empty file

I am trying to use the upload method from s3 with a ReadableStream from the fs module.

The documentation says that a ReadableStream can be used in the Body param:

Body - (Buffer, Typed Array, Blob, String, ReadableStream) Object data.

There is also this description of the upload method:

Uploads an arbitrarily sized buffer, blob, or stream, using intelligent concurrent handling of parts if the payload is large enough.

Also, here: Upload pdf generated to AWS S3 using nodejs aws sdk, @shivendra's answer says it can use a ReadableStream and it works.
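
(Side note: upload also accepts a second options argument that controls the concurrent part handling the docs mention; a minimal sketch with assumed values, using the same bucket and file as below:)

const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

// partSize / queueSize tune the multipart behaviour described in the docs
s3.upload(
  { Bucket: 'test-bucket', Key: 'output.txt', Body: fs.createReadStream('/home/osman/Downloads/input.txt') },
  { partSize: 5 * 1024 * 1024, queueSize: 4 }, // 5 MB parts, up to 4 in flight
  (err, data) => {
    if (err) throw err
    console.log(data.Location)
  }
)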

This is my code:

const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

const send = async () => {
  const rs = fs.createReadStream('/home/osman/Downloads/input.txt')
  rs.on('open', () => {
    console.log('OPEN')
  })
  rs.on('end', () => {
    console.log('END')
  })
  rs.on('close', () => {
    console.log('CLOSE')
  })
  rs.on('data', (chunk) => {
    console.log('DATA: ', chunk)
  })

  console.log('START UPLOAD')

  const response = await s3.upload({
    Bucket: 'test-bucket',
    Key: 'output.txt',
    Body: rs,
  }).promise()

  console.log('response:')
  console.log(response)
}

send().catch(err => { console.log(err) })

I get this result:

START UPLOAD
OPEN
DATA: <Buffer 73 6f 6d 65 74 68 69 6e 67>
END
CLOSE
response:
{ ETag: '"d41d8cd98f00b204e9800998ecf8427e"',
  Location: 'https://test-bucket.s3.amazonaws.com/output.txt',
  key: 'output.txt',
  Key: 'output.txt',
  Bucket: 'test-bucket' }

The problem is that my file generated in S3 (output.txt) has 0 bytes.

Does anyone know what I'm doing wrong?

If I pass a Buffer as the Body, it works.

Body: Buffer.alloc(8 * 1024 * 1024, 'something'), 

But that is not what I want to do. I want to use a stream: generate a file and pipe the stream to S3 while I am generating it, roughly the pattern sketched below.
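
(A sketch only, assuming the same bucket and key; the PassThrough stream stands in for whatever generates the data:)

const { PassThrough } = require('stream')
const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

// Hand S3 a PassThrough and write into it while the upload is in flight
const pass = new PassThrough()

const uploading = s3.upload({
  Bucket: 'test-bucket',
  Key: 'output.txt',
  Body: pass,
}).promise()

pass.write('generated chunk 1\n')
pass.write('generated chunk 2\n')
pass.end() // the upload completes once the stream ends

uploading.then(response => console.log(response.Location))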

2 answers

It is something odd about the NodeJS ReadableStreams API. Simply commenting out the code that listens to the 'data' event solves the problem:

const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

const send = async () => {
  const rs = fs.createReadStream('/home/osman/Downloads/input.txt')
  rs.on('open', () => {
    console.log('OPEN')
  })
  rs.on('end', () => {
    console.log('END')
  })
  rs.on('close', () => {
    console.log('CLOSE')
  })
  // rs.on('data', (chunk) => {
  //   console.log('DATA: ', chunk)
  // })

  console.log('START UPLOAD')

  const response = await s3.upload({
    Bucket: 'test-bucket',
    Key: 'output.txt',
    Body: rs,
  }).promise()

  console.log('response:')
  console.log(response)
}

send().catch(err => { console.log(err) })

Strange as the API is, when we listen to the 'data' event, the ReadableStream switches to flowing mode (listening to an event changes the publisher/EventEmitter state? Yes, very error-prone...). For some reason, S3 needs a paused ReadableStream. If we put rs.on('data', ...) after await s3.upload(...), it works. If we call rs.pause() after rs.on('data', ...) and before await s3.upload(...), it works too.
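
A minimal sketch of that second workaround, keeping the 'data' listener but pausing the stream before the upload:

const fs = require('fs')
const S3 = require('aws-sdk/clients/s3')

const s3 = new S3()

const send = async () => {
  const rs = fs.createReadStream('/home/osman/Downloads/input.txt')

  rs.on('data', (chunk) => {
    console.log('DATA: ', chunk)
  })
  rs.pause() // switch back to paused mode before S3 sees the stream

  const response = await s3.upload({
    Bucket: 'test-bucket',
    Key: 'output.txt',
    Body: rs,
  }).promise()

  console.log(response)
}

send().catch(err => { console.log(err) })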

Now, why does this happen? I don't know yet...

But at least the problem is solved.

  • First, check whether /home/osman/Downloads/input.txt actually exists and is readable by your node.js process (a quick check is sketched right after this list)
  • Second, try the putObject method
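
A quick way to verify the first point (a sketch; the path is the one from the question):

const fs = require('fs');

// Pre-flight check: fail fast if the process cannot read the file
fs.access('/home/osman/Downloads/input.txt', fs.constants.R_OK, (err) => {
  if (err) {
    console.error('file is not readable:', err);
  } else {
    console.log('file is readable');
  }
});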

For the second point, try this:

const fs = require('fs');
const S3 = require('aws-sdk/clients/s3');

const s3 = new S3();

s3.putObject({
  Bucket: 'test-bucket',
  Key: 'output.txt',
  Body: fs.createReadStream('/home/osman/Downloads/input.txt'),
}, (err, response) => {
  if (err) {
    throw err;
  }
  console.log('response:');
  console.log(response);
});

I am not sure how this will work with async .. await; it is safer to upload to AWS S3 the usual old-school callback way.
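
(That said, if you do want async .. await, putObject also exposes .promise(); a sketch reusing the fs and s3 from above, which I have not verified with a stream Body:)

const send = async () => {
  const response = await s3.putObject({
    Bucket: 'test-bucket',
    Key: 'output.txt',
    Body: fs.createReadStream('/home/osman/Downloads/input.txt'),
  }).promise();
  console.log(response);
};

send().catch(console.error);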


UPDATE: Try implementing the upload via ManagedUpload:

const fs = require('fs');
const S3 = require('aws-sdk/clients/s3');

const s3 = new S3();

const upload = new S3.ManagedUpload({
  service: s3,
  params: {
    Bucket: 'test-bucket',
    Key: 'output.txt',
    Body: fs.createReadStream('/home/osman/Downloads/input.txt')
  }
});

upload.send((err, response) => {
  if (err) {
    throw err;
  }
  console.log('response:');
  console.log(response);
});
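
(Instead of upload.send(...) above, ManagedUpload also exposes .promise() and emits progress events; a sketch reusing the upload object from the block above:)

// Alternative to upload.send(...): promise-based completion plus progress
upload.on('httpUploadProgress', (progress) => {
  // progress.total can be undefined when the Body length is unknown
  console.log(`uploaded ${progress.loaded} bytes`);
});

upload.promise()
  .then((response) => {
    console.log('response:');
    console.log(response);
  })
  .catch((err) => {
    console.error(err);
  });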