Uploading files to an S3 bucket in a Node application using the formidable package

Posted By : Dinesh Dhiman | 31-Mar-2015

This example demonstrates how to upload a file to an S3 bucket from a Node application using the formidable package. Install aws-sdk, connect-multiparty, express and formidable in your Node project:

npm install express
npm install aws-sdk
npm install connect-multiparty
npm install formidable@latest
 

Require connect-multiparty and formidable in your application start file, app.js or server.js (depending on how you have set up your application):

 // Middleware that parses multipart/form-data bodies and exposes uploads on req.files.
 var multipart = require('connect-multiparty');
 // Low-level multipart parser (used by the upload flow).
 var formidable = require('formidable');
 

Add multipart as middleware function in application

// Create the Express app and export it for use by other modules.
var app = module.exports = express();
// Register multipart parsing globally so every route can read req.files.
app.use(multipart());
 

Define the route as follows in your application:

// POST endpoint for file uploads. The "anonymous" authority lets
// unauthenticated clients reach this route.
router.post('/api/v1/user/fileUpload', configurationHolder.security.authority("anonymous"), function (req, res) {
    // Delegate to the s3upload function defined in FileUploadUtility.js;
    // make sure that file is required wherever this route is registered.
    FileUploadUtility.s3upload(req, res);
});
 

Create the FileUploadUtility.js file:

var AWS = require('aws-sdk');
var fs = require('fs');

// Configure the AWS credentials. The original left these values empty
// (a syntax error); never hard-code keys in source — read them from the
// environment (or rely on an IAM role / shared credentials file).
AWS.config.update({
    // AWS access key id, e.g. exported as AWS_ACCESS_KEY_ID
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    // AWS secret access key, e.g. exported as AWS_SECRET_ACCESS_KEY
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});
/**
 * Uploads the file attached to the request (req.files.file, populated by
 * connect-multiparty) to the configured S3 bucket and answers with JSON.
 *
 * Fixes over the original:
 *  - The putObject callback parameter was named `res`, shadowing the
 *    Express response, so res.json(...) was called on the S3 response.
 *  - The response message strings were broken across physical lines
 *    (unterminated string literals — a syntax error).
 *  - The fs.readFile error was never checked.
 *  - `imagepath` was computed but never used; it is now returned as `url`.
 *
 * @param {Object} req Express request carrying the multipart upload.
 * @param {Object} res Express response; receives a JSON result.
 */
var s3upload = function (req, res) {

    // Temp path where connect-multiparty stored the uploaded file.
    var path = req.files.file.path;
    var fileName = req.files.file.originalFilename;

    // Public URL the object will have once it is in the bucket.
    var imagepath = "http://s3-ap-southeast-1.amazonaws.com/" +
        configurationHolder.config.bucketname + "/" + fileName;

    var s3 = new AWS.S3();

    // Read the temp file into a Buffer and push it to S3.
    fs.readFile(path, function (err, file_buffer) {
        if (err) {
            console.log("Error reading uploaded file: " + err);
            return res.json({ "error": true, "message": "Upload failed. Please try later." });
        }

        var params = {
            Bucket: configurationHolder.config.bucketname, // target bucket
            Key: fileName,                                 // object key in the bucket
            Body: file_buffer                              // file contents
        };

        // NOTE: the callback parameters must NOT be named `res` — that
        // would shadow the Express response object.
        s3.putObject(params, function (s3err, data) {
            if (s3err) {
                console.log("Error uploading data: " + s3err);
                return res.json({ "error": true, "message": "Upload failed. Please try later." });
            }
            console.log("upload successfully");
            res.json({ "error": false, "message": "upload successfully", "url": imagepath });
        });
    });
};

module.exports.s3upload = s3upload;

Issue we faced: if you explicitly set the Content-Type header to multipart/form-data with a boundary value, the upload fails with "Error: MultipartParser.end(): stream ended unexpectedly: state = START_BOUNDARY". We simply removed the Content-Type header from the request; our Node application then resolved the content type automatically based on the file extension, and the upload started working.

Thanks and Regards
Dinesh Dhiman

About Author

Author Image
Dinesh Dhiman

Dinesh is good in java and grails with having extra knowledge in AngularJS , HTML ,SQL , Selenium

Request for Proposal

Name is required

Comment is required

Sending message..