I'm following this example
// Load the stream and gzip it on the fly.
var fs = require('fs'), zlib = require('zlib');
var body = fs.createReadStream('bigfile').pipe(zlib.createGzip());
// Upload the stream.
// S3 stores the bytes exactly as sent (compressed); it never unzips for you.
// Setting ContentEncoding: 'gzip' tells S3 to serve the object with a
// `Content-Encoding: gzip` header, so browsers/HTTP clients transparently
// decompress it on download — the standard fix for this exact problem.
var s3obj = new AWS.S3({params: {Bucket: 'myBucket', Key: 'myKey'}});
s3obj.upload({Body: body, ContentEncoding: 'gzip'}, function(err, data) {
  // On error, `data` is undefined — must return here, or the success log
  // below runs anyway and throws on `data.Location`.
  if (err) return console.log("An error occurred", err);
  console.log("Uploaded the file at", data.Location);
});
And it "works" in that it does everything exactly as expected, EXCEPT that the file arrives on S3 compressed and stays that way.
As far as I can tell, S3 has no facility to automatically unzip an object after it arrives. So if your intention is to upload a publicly available image or video (or anything else the end user is meant to consume directly), the workaround appears to be to skip compression and upload the file unzipped, like so...
// Load the stream — gzip step deliberately removed so the object lands on
// S3 uncompressed and is directly consumable by end users.
var fs = require('fs'), zlib = require('zlib');
var body = fs.createReadStream('bigfile');//.pipe(zlib.createGzip()); <-- removing the zipping part
// Upload the stream.
var s3obj = new AWS.S3({params: {Bucket: 'myBucket', Key: 'myKey'}});
s3obj.upload({Body: body}, function(err, data) {
  // On error, `data` is undefined — return here so the success log below
  // doesn't throw on `data.Location`.
  if (err) return console.log("An error occurred", err);
  console.log("Uploaded the file at", data.Location);
});
I'm curious if I'm doing something wrong and if there IS an automatic way to have S3 recognize that the file is arriving zipped and unzip it?
Aucun commentaire:
Enregistrer un commentaire