Extract a zip file from an S3 bucket with an AWS Lambda function (Node.js) and upload the contents to another bucket

10,205

You can use zlib to decompress the buffer that you get from S3 — but note that zlib handles gzip/Deflate streams only, not `.zip` archives (see the comments below).

// Fetch the object described by `params` from S3, decompress it, and
// re-upload the decompressed bytes to another bucket/key.
// NOTE(review): zlib.gunzip handles gzip/Deflate streams only, NOT .zip
// archives — for real .zip files a dedicated library is required. TODO confirm
// the input format ("incorrect header check" means the body is not gzip data).
s3.getObject(params, (err, data) => {
    if (err) {
        console.log(err);
        const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
        console.log(message);
        callback(message);
    } else {
        zlib.gunzip(data.Body, function (gunzipErr, extractedData) {
            if (gunzipErr) {
                console.log(gunzipErr);
            } else {
                s3.putObject({
                    Bucket: "bucketName",
                    Key: "filename",
                    Body: extractedData,
                    ContentType: 'content-type'
                }, function (putErr) {
                    // Bug fix: the original logged 'uploaded file: ' + err
                    // unconditionally, printing "uploaded file: null" on
                    // success and never distinguishing an upload failure.
                    if (putErr) {
                        console.log(putErr);
                    } else {
                        console.log('uploaded file');
                    }
                });
            }
        });
    }
});

I think the above function will help you.

Share:
10,205

Related videos on Youtube

abdulbarik
Author by

abdulbarik

I am a Full Stack Developer and JavaScript enthusiast, living in Mumbai, India.

Updated on September 15, 2022

Comments

  • abdulbarik
    abdulbarik about 1 year

    I am playing with AWS Lambda and Node.js. I have created a Lambda function and configured it with an S3 event. I want to extract a zip file that is uploaded to S3 and upload the extracted files to another folder in the same bucket.

    I am getting the bucket and file information from following code but after that I don't know how to extract and upload to s3.

    Any suggestion or chunk of code will be helpful for me.

    'use strict';
    
    console.log('Loading function to get all latest object from S3 service');
    
    const aws = require('aws-sdk');
    
    const s3 = new aws.S3({ apiVersion: '2006-03-01' });
    
    
    exports.handler = (event, context, callback) => {
        console.log('Received event:', JSON.stringify(event, null, 2));
    
        // Get the object from the event and show its content type
        const bucket = event.Records[0].s3.bucket.name;
        const key = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
        const params = {
            Bucket: bucket,
            Key: key,
        };
        s3.getObject(params, (err, data) => {
            if (err) {
                console.log(err);
                const message = `Error getting object ${key} from bucket ${bucket}. Make sure they exist and your bucket is in the same region as this function.`;
                console.log(message);
                callback(message);
            } else {
                console.log('CONTENT TYPE:', data.ContentType);
                callback(null, data);
            }
        });
    };
    
  • abdulbarik
    abdulbarik almost 7 years
    Getting error Error: incorrect header check on gunzip
  • Akshay Kumar
    Akshay Kumar almost 7 years
    This error is thrown when the data is not compressed.
  • abdulbarik
    abdulbarik almost 7 years
    How can I resolve this error? Can you update your answer?
  • Raxit Solanki
    Raxit Solanki almost 4 years
    I am also stuck on the same issue. From FAQs I came to know that zlib can not handle ".zip" files, either it should be "gzip" or "Deflate-compressed stream". Any idea on how to handle .zip extensions using zlib?
  • Seba Illingworth
    Seba Illingworth about 2 years
    Correct, zlib does not handle .zip files. Docs here: nodejs.org/api/zlib.html