Saving an image stored on S3 using node.js?

I am trying to write an image server that uses node.js to store images on S3. Uploading the image works fine, and I can download and view it correctly using an S3 browser client (I use DragonDisk in particular, but I have successfully downloaded it with other clients as well). However, when I download it with node and try to write it to disk, I cannot open the file (it says it may be damaged or use a file format that Preview does not recognize). I am using the AWS SDK for node and fs to write the file. I know you can pass an optional encoding to fs.writeFile, but I have tried all of them and none worked. I have also tried setting ContentType on putObject and ResponseContentType on getObject, as well as ContentEncoding and ResponseContentEncoding (and all of these in various combinations). Same result. Here is the code:

    var AWS = require('aws-sdk'),
        gm = require('../lib/gm'),
        uuid = require('node-uuid'),
        fs = require('fs');

    AWS.config.loadFromPath('./amazonConfig.json');
    var s3 = new AWS.S3();
    var bucket = 'myBucketName'; // There is other logic here to set the bucket name.

    exports.upload = function(req, res) {
        var id = uuid.v4();
        gm.format("/path/to/some/image.jpg", function(format) {
            var key = req.params.dir + "/" + id + "/default." + format;
            fs.readFile('/path/to/some/image.jpg', function(err, data) {
                if (err) {
                    console.warn(err);
                } else {
                    s3.client.putObject({
                        Bucket: bucket,
                        Key: key,
                        Body: data,
                        ContentType: 'image/jpeg'
                        // I've also tried adding ContentEncoding (in various formats) here.
                    }).done(function(response) {
                        res.status(200).end(JSON.stringify({ok: 1, id: id}));
                    }).fail(function(response) {
                        res.status(response.httpResponse.statusCode).end(JSON.stringify({err: response}));
                    });
                }
            });
        });
    };

    exports.get = function(req, res) {
        var key = req.params.dir + "/" + req.params.id + "/default.JPEG";
        s3.client.getObject({
            Bucket: bucket,
            Key: key,
            ResponseContentType: 'image/jpeg'
            // Tried ResponseContentEncoding here in base64, binary, and utf8
        }).done(function(response) {
            res.status(200).end(JSON.stringify({ok: 1, response: response}));
            var filename = '/path/to/new/image/default.JPEG';
            fs.writeFile(filename, response.data.Body, function(err) {
                if (err) console.warn(err);
                // This DOES write the file, just not as an image that can be opened.
                // I've tried pretty much every encoding as the optional third parameter,
                // and I've matched the encodings to the ResponseContentEncoding and
                // ContentEncoding above (in case they need to be the same).
            });
        }).fail(function(response) {
            res.status(response.httpResponse.statusCode).end(JSON.stringify({err: response}));
        });
    };

By the way, I'm using Express for routing, which is where req.params comes from.
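As a side note (not part of the original question), a quick way to narrow down whether the bytes are being corrupted on upload or on download is to compare hashes of the original file and the copy written back from S3. A minimal sketch using Node's built-in crypto module; the paths are just placeholders matching the code above:

    // Sketch: compare MD5 hashes of the original image and the round-tripped copy.
    // If the hashes differ, the bytes were mangled somewhere in transit or on write.
    var crypto = require('crypto'),
        fs = require('fs');

    function md5Of(path, cb) {
        fs.readFile(path, function(err, buf) {
            if (err) return cb(err);
            cb(null, crypto.createHash('md5').update(buf).digest('hex'));
        });
    }

    md5Of('/path/to/some/image.jpg', function(err, original) {
        md5Of('/path/to/new/image/default.JPEG', function(err2, roundTripped) {
            console.log('original:     ', original);
            console.log('round-tripped:', roundTripped);
        });
    });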

+8
amazon-s3 fs
2 answers

Well, after considerable trial and error, I figured out how to do it. I ended up switching to knox, but presumably you could use a similar strategy with aws-sdk. This is one of those solutions that makes me say "there has to be a better way than this," but I'm happy it works for the moment.

    var imgData = "";
    client.getFile(key, function(err, fileRes) {
        fileRes.on('data', function(chunk) {
            imgData += chunk.toString('binary');
        }).on('end', function() {
            res.set('Content-Type', pic.mime);
            res.set('Content-Length', fileRes.headers['content-length']);
            res.send(new Buffer(imgData, 'binary'));
        });
    });

getFile() returns the data in chunks as Buffers. You'd think you could just pipe the result straight to the front end, but for some reason this was the ONLY way I could get the service to return the image correctly. It feels redundant to write a Buffer out to a binary string only to write it back into a Buffer, but hey, if it works, it works. If anyone finds a more efficient solution, I would love to hear it.
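One possibly more efficient variation (not from the original answer, and offered only as a sketch) is to collect the chunks as Buffers and concatenate them once at the end, skipping the binary-string round trip. This assumes the same client, key, res, and pic.mime variables as the snippet above:

    // Sketch: accumulate raw Buffer chunks and concatenate once with Buffer.concat,
    // instead of converting each chunk to a binary string and back.
    client.getFile(key, function(err, fileRes) {
        if (err) return res.status(500).end();
        var chunks = [];
        fileRes.on('data', function(chunk) {
            chunks.push(chunk); // each chunk is already a Buffer
        }).on('end', function() {
            var body = Buffer.concat(chunks);
            res.set('Content-Type', pic.mime);
            res.set('Content-Length', String(body.length));
            res.send(body);
        });
    });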

+5

For anyone still struggling with this issue, here is the approach I used with the native aws-sdk.

    var AWS = require('aws-sdk');
    AWS.config.loadFromPath('./s3_config.json');
    var s3Bucket = new AWS.S3({ params: { Bucket: 'myBucket' } });

Inside your router (ContentType should be set to the content type of the image file):

    // req.body.imageBinary is expected to be a base64 data URI
    // (e.g. "data:image/jpeg;base64,..."); strip the prefix before decoding.
    var buf = new Buffer(req.body.imageBinary.replace(/^data:image\/\w+;base64,/, ""), 'base64');
    var data = {
        Key: req.body.userId,
        Body: buf,
        ContentEncoding: 'base64',
        ContentType: 'image/jpeg'
    };
    s3Bucket.putObject(data, function(err, data) {
        if (err) {
            console.log(err);
            console.log('Error uploading data: ', data);
        } else {
            console.log('successfully uploaded the image!');
        }
    });
The s3_config.json file:

 { "accessKeyId":"xxxxxxxxxxxxxxxx", "secretAccessKey":"xxxxxxxxxxxxxx", "region":"us-east-1" } 
+10
