Download file from url and upload it to AWS S3 without saving - node.js
32,241
Solution 1
Here's some javascript that does this nicely:
// Download the file at `uri` into memory and upload it to S3 without
// writing to disk. `encoding: null` makes `request` deliver the body as
// a raw Buffer instead of a decoded string — required so binary data
// (images, PDFs) survives the round trip uncorrupted.
var options = {
  uri: uri,
  encoding: null
};
request(options, function(error, response, body) {
  if (error || response.statusCode !== 200) {
    console.log("failed to get image");
    console.log(error);
  } else {
    // `body` is a Buffer here, so S3 stores the bytes verbatim.
    s3.putObject({
      Body: body,
      Key: path,
      Bucket: 'bucket_name'
    }, function(error, data) {
      if (error) {
        // Fix: this branch is the *upload* step, not the download step,
        // and the error itself should be logged for diagnosis.
        console.log("error uploading image to s3");
        console.log(error);
      } else {
        console.log("success uploading to s3");
      }
    });
  }
});
Solution 2
This is what I did and works nicely:
const request = require('request-promise')
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

// `encoding: null` => request-promise resolves with a raw Buffer
// (binary-safe; a decoded string would corrupt images/PDFs).
const options = {
  uri: uri,
  encoding: null
};

// Fix: `async load()` is only valid as an object/class method shorthand;
// at the top level of a script it is a SyntaxError. Declare a proper
// async function instead.
//
// Downloads the resource into memory, then uploads it to S3.
// Returns the S3 upload result (includes `Location`, `Key`, etc.).
async function load() {
  const body = await request(options)
  // s3.upload handles multipart/concurrency; .promise() adapts the
  // AWS.Request to a native Promise so it can be awaited.
  const uploadResult = await s3.upload({
    Bucket: 'bucket_name',
    Key: path,
    Body: body,
  }).promise()
  return uploadResult
}
Solution 3
What about something like that:
const stream = require('stream');
const request = require('request');
const s3 = new AWS.S3()

// Pipe the HTTP response through a PassThrough stream straight into S3 —
// nothing touches disk and only small chunks are held in memory.
const pass = new stream.PassThrough();
request(url)
  .on('error', function(err) {
    // Propagate download failures into the stream so the upload aborts
    // instead of hanging waiting for data.
    pass.emit('error', err);
  })
  .pipe(pass);

// Fix: s3.upload() called with neither a callback nor .promise()/send()
// only constructs a ManagedUpload — the request is never sent and the
// snippet silently does nothing. Supply a callback to start the upload
// and surface the outcome.
s3.upload({
  Bucket: 'bucket_name',
  Key: path,
  Body: pass,
}, function(err, data) {
  if (err) {
    console.log('upload to s3 failed');
    console.log(err);
  } else {
    console.log('uploaded to s3:', data.Location);
  }
});
Author by
Loourr
Updated on August 26, 2021
Comments
-
Loourr over 2 years
I'm writing an application which downloads images from a url and then uploads it to an S3 bucket using the aws-sdk.
Previously I was just downloading images and saving them to disk like this.
request.head(url, function(err, res, body){ request(url).pipe(fs.createWriteStream(image_path)); });
And then uploading the images to AWS S3 like this
fs.readFile(image_path, function(err, data){ s3.client.putObject({ Bucket: 'myBucket', Key: image_path, Body: data, ACL: 'public-read' }, function(err, resp) { if(err){ console.log("error in s3 put object cb"); } else { console.log(resp); console.log("successfully added image to s3"); } }); });
But I would like to skip the part where I save the image to disk. Is there some way I can
pipe
the response from request(url)
to a variable and then upload that? -
Armadillo Jim over 8 yearsAs written, the code loads the entire body into memory at once (as a string into the "body" variable). That is, this does not stream directly from request to S3. OTOH, request will create a Buffer object for "body" if "encoding" is null; see github.com/request/request#requestoptions-callback. I suggested an edit to this answer to change
encoding:'binary'
to encoding: null
and eliminate body = new Buffer(body, 'binary')
. That would remove the need to store the entire "body" in memory, and I think that's in keeping with the original question and answer. But reviews wanted a comment ... -
Ilan lewin over 7 yearsI tried your approach, both with implicit and explicit encoding, I find that my uploaded png files are corrupted for some reason, can't figure out why. Trying to copy this image openclipart.org/image/250px/svg_to_png/264091/MirrorCarp.png and this is what I get on my bucket images.quickhunts.com/clipart/23234234234.png
-
Loourr over 7 years@Ilanlewin It definitely works with
png
images but make sure you're implementing the fs.readFile
correctly. It may have changed since I originally wrote this answer, you may need to be more specific with encoding. Also possibly try some jpg
s or other generic images. -
megapixel23 over 6 yearsI was trying to store a PDF from a remote URL to S3. But the PDF was corrupted after uploading. @ArmadilloJim's fix with
encoding: null
seems to be working for me. -
CodeCracker over 4 yearsCan we do the same in iOS from the app side?
-
Loourr over 4 yearsI don't see why not. You'll likely need to understand objective C streaming behavior though.
-
s4suryapal over 3 yearshow to get public url in callback of S3 ?
-
elpmid over 3 yearsI want to apply this solution to my app, but the request module is deprecated. I want to migrate the code to use axios — is there anyone who could help me?
-
insivika over 3 yearsCan you please specify the path param