My objective is to allow users to upload files to S3 using their web browser, my requirements are:
It must handle large files (2GB+)
It must support pause/resume with progress indicator
Ability to resume upload if connection temporarily drops out
I am currently using this on the server side:
// AWS SDK setup: credentials come from the app config; region is fixed.
// One config.update call covers credentials and region together.
var AWS = require('aws-sdk');
AWS.config.update({
    accessKeyId: config.AWS_ACCESS_KEY_ID,
    secretAccessKey: config.AWS_SECRET_ACCESS_KEY,
    region: 'us-west-2'
});
var s3 = new AWS.S3();
// POST /api/gets3url/ — returns a presigned S3 PUT URL for the given
// file_name + mime_type. Responds {success: true, url} on success and
// {success: false, err} on missing input or a signing failure.
app.post('/api/gets3url/', function(req, res) {
    if (req.body.file_name && req.body.mime_type) {
        var params = {
            Bucket: config.AWS_BUCKET,
            Key: req.body.file_name,
            Expires: 60 * 60, // presigned URL is valid for one hour
            ContentType: req.body.mime_type // must match the client's PUT Content-Type
        };
        s3.getSignedUrl('putObject', params, function(err, _url) {
            // Bug fix: previously a signing error was only logged and the
            // response still claimed success with an undefined url.
            if (err) {
                console.log(err);
                return res.json({success: false, err: "could not sign url"});
            }
            return res.json({success: true, url: _url});
        });
    }
    else {
        return res.json({success: false, err: "insufficient data"});
    }
});
and on the client:
<!-- File picker only: the upload is driven by requestURL() in script below,
     so the form is never submitted natively — action/method/enctype are unused. -->
<form action="" method="post" enctype="multipart/form-data" id="myform" name="myform">
<input type="file" name="file" id="file">
</form>
<!-- Plain button (type="button"), kept outside the form so a click can never
     trigger a native form submit; the click handler is bound by id below. -->
<input type="button" name="submit" id="btn_submit" value="submit">
<script type="text/javascript">
requestURL = function(event) {
var file = $('#file')[0].files[0]
var mime = file.type;
var _data = {file_name: file.name, mime_type: mime, size: file.size};
$.ajax({ //request signed url
url: '/api/gets3url/',
type: 'POST',
data: _data,
}).success(function(res) {
console.log(res);
$.ajax({
url: res.url,
type: 'PUT',
data: file,
processData: false,
contentType: file.type,
}).success(function(res) {
console.log('Done');
});
});
}
$("#btn_submit").bind( "click", requestURL);
</script>
This is working, but it doesn't fulfill the requirements.
Please help. Most of the content I find about this is outdated.
Aucun commentaire:
Enregistrer un commentaire