Node.js with Amazon SDK for S3 uploads

Patrick Lu
Quant Five
Published in
2 min readJan 17, 2018
Amazon’s documentation is anything but happy

Amazon’s documentation is notoriously horrible. On one of our projects, we needed to upload files to S3 with Node.js. Getting the configuration right took a lot longer than it should’ve. The issue was setting up the credentials for the server — we kept running into a 403: "Access Denied" error.

The issue was that the AWS SDK was picking up the credentials I had in ~/.aws/credentials, but I wanted to use a separate set of credentials.

So to get the right credentials, here are the steps:

  1. Install the node AWS SDK with
npm install --save aws-sdk

2. Import the SDK into your project

var AWS = require('aws-sdk');

3. Instantiate your s3 instance with your credentials

// Create an S3 client with explicit credentials read from environment
// variables, overriding anything in ~/.aws/credentials.
// NOTE: the conventional variable name is AWS_SECRET_ACCESS_KEY —
// the original's "SECRET_ACCESS_KEY_ID" mixed up the key ID and the secret,
// so the secret would silently be undefined.
const s3 = new AWS.S3({
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});

I like to put my secrets into environment variables. It’s definitely not a good idea to hard-code your secrets into the source in plaintext.

4. Convert your file to binary & put the object into s3

// Wrap the raw file contents in a Buffer and upload it to S3.
// Buffer.from() replaces the deprecated (and unsafe) `new Buffer()` constructor.
const body = Buffer.from(data, 'binary');
s3.putObject({
  Bucket: 'YOUR_BUCKET_HERE', // TODO: replace with your bucket name
  Key: 'name-of-your-file',   // TODO: replace with the object key
  Body: body,
}, function (err, resp) {
  // putObject's callback signature is (err, data) — the original treated
  // the first argument as the response, so errors looked like successes.
  if (err) {
    log.error({ error: err }, 'Error uploading to S3');
    return;
  }
  log.info(resp);
  log.info('Successfully uploaded package.');
});

What this looks like in a route for express

const express = require('express');
const fetch = require('node-fetch');
var multer = require('multer');
var FormData = require('form-data');
const getLogger = require('../../logger').getLogger;
const log = getLogger('route');
// The original fused the next two statements onto one line
// ("...asyncMiddlewarevar storage = ..."), which is a syntax error.
const asyncMiddleware = require('./middleware').asyncMiddleware;
var storage = multer.memoryStorage(); // keep uploads in memory so file.buffer is available
var upload = multer({ storage: storage });
const router = express.Router();
var AWS = require('aws-sdk');
require('dotenv').config(); // load .env before reading process.env below
// S3 client with explicit credentials from the environment (populated by
// dotenv above). NOTE: the conventional variable name is
// AWS_SECRET_ACCESS_KEY — "SECRET_ACCESS_KEY_ID" mixed up the key ID and
// the secret, leaving the secret undefined.
const s3 = new AWS.S3({
  credentials: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});
// POST handler: forwards the uploaded file to an external service and, in
// production, archives a copy to S3 before returning the service's JSON.
const route = asyncMiddleware(async (req, res, next) => {
  var attrs = req.body;   // NOTE(review): unused in this snippet — confirm before removing
  var user = req.user;    // NOTE(review): unused in this snippet — confirm before removing
  var file = req.file;    // populated by multer's memory storage (upload.single('file'))

  const formData = new FormData();
  // Buffer.from() replaces the deprecated `new Buffer()` constructor.
  formData.append('route', Buffer.from(file.buffer));

  // NOTE(review): `url` and `utils` are not defined anywhere in this
  // snippet — they must come from config/helpers for this to run.
  return fetch(url, { method: 'post', body: formData })
    .then(utils.checkStatus)
    .then(utils.parseJSON)
    .then((json) => {
      if (process.env.NODE_ENV === 'production') {
        try {
          // BUG in original: `data` was never defined (ReferenceError);
          // the uploaded file's bytes live in file.buffer.
          var base64data = Buffer.from(file.buffer);
          s3.putObject({
            Bucket: 'bucketname',
            Key: 'key',
            Body: base64data,
          }, function (err, resp) {
            // putObject's callback is (err, data) — check the error first;
            // the original logged every outcome as a success.
            if (err) {
              log.error({ error: err }, "Error uploading to s3");
              return;
            }
            log.info(resp);
            log.info('Successfully uploaded package.');
          });
        } catch (e) {
          log.error({ error: e }, "Error uploading to s3");
        }
      }

      return res.json(json);
    })
    .catch(error => {
      log.error({ error: error }, "Error fetching");
      // Surface the upstream service's error body to the client.
      return error.response.text()
        .then(errorText => {
          log.error({ errorText: errorText }, "Error processing");
          return res.status(error.response.status).json({ error: errorText });
        });
    });
});
router.post('/route/', upload.single('file'), route);module.exports = router;

Boom, an easy solution & much needed documentation missing from AWS’s Node.js SDK.

--

--