Saving an image stored on s3 using node.js?


Solution 1

Ok, after significant trial and error, I've figured out how to do this. I ended up switching to knox, but presumably, you could use a similar strategy with aws-sdk. This is the kind of solution that makes me say, "There has to be a better way than this," but I'm satisfied with anything that works, at this point.

var imgData = "";
client.getFile(key, function(err, fileRes){
    if (err) { return res.status(500).end(); } // handle the S3 error case
    fileRes.on('data', function(chunk){
        // Each chunk is a Buffer; accumulate it as a binary string
        imgData += chunk.toString('binary');
    }).on('end', function(){
        res.set('Content-Type', pic.mime); // pic.mime is the MIME type stored by the app
        res.set('Content-Length', fileRes.headers['content-length']);
        // Buffer.from replaces the deprecated new Buffer(...) constructor
        res.send(Buffer.from(imgData, 'binary'));
    });
});

getFile() returns data chunks as buffers. One would think you could just pipe the results straight to the front end, but for whatever reason, this was the ONLY way I could get the service to return an image correctly. It feels redundant to convert a buffer to a binary string, only to write it back into a buffer, but hey, if it works, it works. If anyone finds a more efficient solution, I would love to hear it.
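For reference, here is a minimal sketch of the piping approach alluded to above, assuming the same knox client and Express response; this is not the author's solution, just the streaming alternative one would expect to work:

client.getFile(key, function(err, fileRes){
    if (err) { return res.status(500).end(); }
    // fileRes is a readable http.IncomingMessage, so it can be piped directly
    res.set('Content-Type', fileRes.headers['content-type']);
    res.set('Content-Length', fileRes.headers['content-length']);
    fileRes.pipe(res);
});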

Solution 2

For people who are still struggling with this issue, here is the approach I used with the native aws-sdk.

var AWS = require('aws-sdk');
AWS.config.loadFromPath('./s3_config.json');
var s3Bucket = new AWS.S3( { params: {Bucket: 'myBucket'} } );

Inside your router method (ContentType should be set to the content type of the image file):

  // Strip the data-URI prefix and decode the base64 payload into a Buffer.
  // Buffer.from replaces the deprecated new Buffer(...) constructor.
  var buf = Buffer.from(req.body.imageBinary.replace(/^data:image\/\w+;base64,/, ""), 'base64');
  var data = {
    Key: req.body.userId,
    Body: buf,
    ContentEncoding: 'base64',
    ContentType: 'image/jpeg'
  };
  s3Bucket.putObject(data, function(err, data){
      if (err) {
        console.log(err);
        console.log('Error uploading data: ', data);
      } else {
        console.log('successfully uploaded the image!');
      }
  });

The s3_config.json file is:

{
  "accessKeyId":"xxxxxxxxxxxxxxxx",
  "secretAccessKey":"xxxxxxxxxxxxxx",
  "region":"us-east-1"
}
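
To complete the round trip, reading the image back out with the same SDK is a single call. This is a sketch under the assumption of the same s3Bucket client and an Express route (the Key here is illustrative), not part of the original answer:

s3Bucket.getObject({ Key: req.params.userId }, function(err, data){
    if (err) { return res.status(500).send(err); }
    // data.Body is a Buffer in the Node.js SDK, so it can be sent as-is
    res.set('Content-Type', data.ContentType || 'image/jpeg');
    res.send(data.Body);
});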
Author: tandrewnichols

Web application developer at Olive AI in Columbus, OH. I work primarily with node and react/redux and contribute frequently to open source node modules.

Updated on June 28, 2022

Comments

  • tandrewnichols about 2 years

    I'm trying to write an image server that uses node.js to store images on s3. Uploading the image works fine, and I can download and view it correctly using an s3 browser client (I'm using dragondisk, specifically, but I've successfully downloaded it with other ones too). But when I download it with node and try to write it to disk, I'm unable to open the file (it says it may be damaged or use a file format that Preview does not recognize).

    I'm using the amazon sdk for node and fs to write the file. I know that you can pass an optional encoding to fs.writeFile, but I've tried them all and it doesn't work. I've also tried setting ContentType on putObject and ResponseContentType on getObject, as well as ContentEncoding and ResponseContentEncoding (and all of these things in various combinations). Same result. Here's some code:

    var AWS = require('aws-sdk')
      , gm = require('../lib/gm')
      , uuid = require('node-uuid')
      , fs = require('fs');
    
    AWS.config.loadFromPath('./amazonConfig.json');
    var s3 = new AWS.S3();
    
    var bucket = 'myBucketName'; // There's other logic here to set the bucket name.
    
    exports.upload = function(req, res) {
        var id = uuid.v4();
        gm.format("/path/to/some/image.jpg", function(format){
            var key = req.params.dir + "/" + id + "/default." + format;
            fs.readFile('/path/to/some/image.jpg', function(err, data){
                if (err) { console.warn(err); }
                else {
                    s3.client.putObject({
                        Bucket: bucket,
                        Key: key,
                        Body: data,
                        ContentType: 'image/jpeg'
                        // I've also tried adding ContentEncoding (in various formats) here.
                     }).done(function(response){
                        res.status(200).end(JSON.stringify({ok:1, id: id}));
                    }).fail(function(response){
                        res.status(response.httpResponse.statusCode).end(JSON.stringify(({err: response})));
                    });
                }
            });
        });
    };
    
    exports.get = function(req, res) {
        var key = req.params.dir + "/" + req.params.id + "/default.JPEG";
        s3.client.getObject({
            Bucket: bucket, 
            Key:  key,
            ResponseContentType: 'image/jpeg'
            // Tried ResponseContentEncoding here in base64, binary, and utf8
        }).done(function(response){
            res.status(200).end(JSON.stringify({ok:1, response: response}));
            var filename = '/path/to/new/image/default.JPEG';
            fs.writeFile(filename, response.data.Body, function(err){
                if (err) console.warn(err);
                // This DOES write the file, just not as an image that can be opened.
                // I've tried pretty much every encoding as the optional third parameter
                // and I've matched the encodings to the ResponseContentEncoding and
                // ContentEncoding above (in case it needs to be the same)
            });
        }).fail(function(response){
            res.status(response.httpResponse.statusCode).end(JSON.stringify({err: response}));
        });
    };
    

    Incidentally, I'm using express for routing, so that's where req.params comes from.

  • Christopher WJ Rueber over 10 years
    S3 expects the length of the stream. It's something of a hack, but if you set the .length property on the stream you're handing to the S3 Body on the putObject call, that would have done the trick (see the sketch after these comments).
  • Lusha Li over 5 years
    My image downloaded from s3 is damaged. Do you know what may be the reason here?
  • Avid Programmer about 5 years
    @ChristopherWJRueber so passing the .length property of the stream to the params ContentLength fixes this issue?
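
A minimal sketch of what these last two comments describe, assuming aws-sdk v2 and a file on disk (the bucket, key, and path here are illustrative, not from the original post): stat the file first and pass ContentLength explicitly, so the SDK doesn't have to infer the stream's length.

var fs = require('fs');
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

var filePath = '/path/to/some/image.jpg'; // hypothetical path
fs.stat(filePath, function(err, stat){
  if (err) { return console.warn(err); }
  s3.putObject({
    Bucket: 'myBucket',            // illustrative bucket name
    Key: 'some/key/default.jpg',   // illustrative key
    Body: fs.createReadStream(filePath),
    ContentLength: stat.size,      // the explicit length the comments mention
    ContentType: 'image/jpeg'
  }, function(err, data){
    if (err) { console.warn(err); }
  });
});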