So, I created this cute little Lambda that works great locally, but not so well once it's deployed.
The Lambda accepts an event containing HTML in the event payload, converts that HTML to a PDF (using the html-pdf node module), uploads the file to an S3 bucket, and then returns a signed URL that expires after 60 seconds.
Or at least that's what should happen (again, works locally). When testing on Lambda, I get the following error:
{ "errorMessage": "spawn EACCES", "errorType": "Error", "stackTrace": [ "exports._errnoException (util.js:870:11)", "ChildProcess.spawn (internal/child_process.js:298:11)", "Object.exports.spawn (child_process.js:362:9)", "PDF.PdfExec [as exec] (/var/task/node_modules/html-pdf/lib/pdf.js:87:28)", "PDF.PdfToFile [as toFile] (/var/task/node_modules/html-pdf/lib/pdf.js:83:8)", "/var/task/index.js:72:43", "Promise._execute (/var/task/node_modules/bluebird/js/release/debuggability.js:272:9)", "Promise._resolveFromExecutor (/var/task/node_modules/bluebird/js/release/promise.js:473:18)", "new Promise (/var/task/node_modules/bluebird/js/release/promise.js:77:14)", "createPDF (/var/task/index.js:71:19)", "main (/var/task/index.js:50:5)" ] }
Here the code itself (not compiled, there is a convenient gulp task there)
// Lambda handler: convert event.html to a PDF, upload it to S3, and
// succeed with a pre-signed URL that expires after 60 seconds.
//
// NOTE(review): the reported "spawn EACCES" is almost certainly the PhantomJS
// binary — html-pdf shells out to phantomjs, and the binary npm installs
// locally is built for the dev machine's OS. Bundle a PhantomJS binary
// compiled for Amazon Linux, ensure it is executable (chmod +x), and point
// html-pdf at it via the `phantomPath` create() option.

if (typeof regeneratorRuntime === 'undefined') {
  require('babel/polyfill')
}

import fs from 'fs'
import path from 'path'
import pdf from 'html-pdf'
import md5 from 'md5'
import AWS from 'aws-sdk'
import Promise from 'bluebird'

// /tmp is the only writable filesystem location on Lambda.
const tempDir = '/tmp'
const config = require('./config')
const s3 = new AWS.S3()

/**
 * Lambda entry point.
 * @param {{ html: string }} event - payload carrying the HTML to render
 * @param {Object} context - Lambda context; succeed() gets the signed URL,
 *   fail() gets the first error from any step
 */
export const main = (event, context) => {
  console.log("Got event: ", event)
  AWS.config.update({
    accessKeyId: config.awsKey,
    secretAccessKey: config.awsSecret,
    region: 'us-east-1'
  })
  // Content-addressed key: identical HTML always maps to the same object.
  const filename = md5(event.html) + ".pdf"
  // Flat chain (was three nested .then pyramids with duplicated error
  // handlers); every step returns its promise so a rejection anywhere
  // reaches the single .catch below.
  createPDF(event.html, filename)
    .then((result) => uploadToS3(filename, result.filename))
    .then(() => getOneTimeUrl(filename))
    .then((url) => context.succeed(url))
    .catch((err) => {
      console.log(err)
      return context.fail(err)
    })
}

// Render `html` to a PDF file under /tmp.
// Resolves with html-pdf's result object ({ filename: <absolute path> }).
const createPDF = (html, filename) => {
  console.log("Creating PDF")
  return new Promise((resolve, reject) => {
    // BUG FIX: the original wrote to the bare `filename`, i.e. the current
    // working directory (/var/task), which is read-only on Lambda. Write to
    // /tmp instead — tempDir was declared but never used.
    pdf.create(html).toFile(path.join(tempDir, filename), (err, res) => {
      if (err) {
        reject(err)
      } else {
        resolve(res)
      }
    })
  })
}

// Stream the file at `filePath` to the configured bucket under key `filename`.
// Resolves with the S3 upload response.
// (Removed the unused `expiryDate`/moment dead code — expiry is handled by
// the signed URL, not the upload.)
const uploadToS3 = (filename, filePath) => {
  console.log("Pushing to S3")
  return new Promise((resolve, reject) => {
    const uploadParams = {
      Bucket: config.pdfBucket,
      Key: filename,
      Body: fs.createReadStream(filePath)
    }
    s3.upload(uploadParams, (err, data) => {
      if (err) {
        reject(err)
      } else {
        resolve(data)
      }
    })
  })
}

// Resolve with a pre-signed GET URL for `filename`, valid for 60 seconds.
const getOneTimeUrl = (filename) =>
  new Promise((resolve, reject) => {
    const params = {
      Bucket: config.pdfBucket,
      Key: filename,
      Expires: 60
    }
    s3.getSignedUrl('getObject', params, (err, url) => {
      if (err) {
        reject(err)
      } else {
        resolve(url)
      }
    })
  })
The problem seems to be in html-pdf. I thought this could be a problem with PhantomJS (which html-pdf depends on), based on some reading I did here: https://engineering.fundingcircle.com/blog/2015/04/09/aws-lambda-for-great-victory/ . Since Lambda allows deployment zips of up to 50 MB, I had no problem uploading the bundled binary.
Any thoughts?