Axios get a file from URL and upload to s3
Asked Answered
B

3

9

I'm trying to get files from a site using axios.get, and then uploading it directly to S3. However, the files are corrupted or not encoded properly, and can't be opened after upload. File types range from .jpg, .png to .pdf. Here is my code:

// Fetch a remote file and upload it to S3.
// Fixes vs. the original: responseType must be 'arraybuffer' (not 'document')
// so binary bodies (.jpg/.png/.pdf) are not decoded as text, and the wrapping
// Promise must resolve on success — the original only ever rejected, leaving
// callers hanging forever on a successful upload.
axios.get(URL, {
  responseType: 'arraybuffer', // keep the raw bytes; any text decoding corrupts binary files
}).then((response) => {
  return new Promise((resolve, reject) => {
    const s3Bucket = nconf.get('AWS_S3_BUCKET');

    s3.upload({
      'ACL': 'public-read',
      'Body': Buffer.from(response.data), // response.data is raw bytes with responseType 'arraybuffer'
      'Bucket': s3Bucket,
      'Key': `static/${filePath}/${fileManaged.get('filename')}`,
    }, function(err, data) {
      if (err) {
        return reject(err);
      }
      resolve(data); // resolve on success so the promise settles
    });
  });
});

I've tried modifying responseType to arraybuffer and creating a buffer using Buffer.from(response.data, 'binary').toString('base64'), to no avail. What am I missing?

Bunns answered 5/5, 2020 at 3:8 Comment(1)
B
19

I was able to get it working by using an arraybuffer and the .putObject function instead of .upload

// Download the file as raw bytes and store it with putObject.
// NOTE(review): the original paste was truncated — it was missing the comma
// before the callback and all closing braces; completed here so it parses.
axios.get(encodeURI(url), {
  responseType: 'arraybuffer', // raw bytes — avoids any text decoding of binary content
}).then((response) => {
  s3.putObject({
    'ACL': 'public-read',
    'Body': response.data,
    'Bucket': s3Bucket,
    'Key': `static/${filePath}/${fileManaged.get('filename')}`,
  }, function(err) {
    if (err) {
      console.error(err);
    }
  });
});
Bunns answered 5/5, 2020 at 19:9 Comment(2)
I have been searching all over stackoverflow, thankfully your answer worked!Unsheathe
It's giving this Error <ref *1> MapiError { message: 'Unknown filetype', type: 'HttpError', statusCode: 422,}Blondy
N
1

Axios encodes the response body in utf8.
You should use another library, such as request.

Nowicki answered 5/5, 2020 at 8:54 Comment(0)
V
0

The response from John Xu is correct, but in my case I had to add Buffer.from(image.data, 'utf8'), as stated above, in order to get the correct buffer, similar to a request response. Here is my code:

const AWS = require('aws-sdk');
const axios = require('axios');

/**
 * uploadFile             uploads a file buffer to an S3 bucket via putObject
 * @param {*} s3_creds    credentials/endpoint object passed to the AWS.S3 constructor
 * @param {*} fullname    absolute path and file name used as the S3 object key
 * @param {*} filecontent buffer with the file's raw bytes
 * @param {*} filetype    MIME type stored as the object's Content-Type
 * @returns {Promise<*>}  resolves with the putObject response data, rejects on error
 */
var uploadFile = async function (s3_creds, fullname, filecontent, filetype) {
    const s3 = new AWS.S3(s3_creds);
    return new Promise((resolve, reject) => {
        // Add a file to a Space
        var params = {
            Key: fullname, // absolute path of the file
            Body: filecontent,
            Bucket: "docserpcloud", // NOTE(review): bucket is hard-coded — consider making it a parameter
            ACL: "public-read", // or private
            ContentEncoding: 'binary', // NOTE(review): 'binary' is not a standard Content-Encoding value — verify it is needed
            ContentType: filetype
        };
        s3.putObject(params, function (err, data) {
            if (err) {
                console.log(err, err.stack);
                reject(err);
            } else {
                console.log(data);
                resolve(data);
            }
        });
    });
};

/**
 * getFilefromURL  downloads a file from a URL as raw bytes
 * @param {*} imageuri full URL of the file to download
 * @returns {Promise<*>} resolves with the axios response; response.data holds the raw bytes
 */
var getFilefromURL = async function (imageuri) {
    // 'arraybuffer' keeps the body as raw bytes; any other responseType would
    // decode binary data as text and corrupt it.
    // The original wrapped this in `new Promise` with a try/catch, but the
    // catch could never fire for the async axios failure and the .then chain
    // had no .catch, so rejections went unhandled. Returning the axios promise
    // directly propagates both success and failure to the caller.
    return axios.get(encodeURI(imageuri), {
        responseType: "arraybuffer"
    });
};


/**
 * saveFileFromUrl gets a file from a URL and saves a copy on an S3 bucket
 * @param {*} s3_creds credentials/endpoint object passed through to uploadFile
 * @param {*} imageuri full URL to the file to copy
 * @param {*} fullname absolute path and filename of the file to be written on S3
 * @returns {Promise<*>} resolves with the S3 response, rejects on download or upload failure
 */
var saveFileFromUrl = async function (s3_creds, imageuri, fullname) {
    const image = await getFilefromURL(imageuri);
    // Buffer.from(image.data) copies the raw bytes. The original passed a
    // 'utf8' encoding argument, which Node ignores for Buffer/ArrayBuffer
    // input and which misleadingly suggests a text decode of binary data.
    return uploadFile(
        s3_creds,
        fullname,
        Buffer.from(image.data),
        image.headers['content-type']
    );
};


module.exports = {
    uploadFile: uploadFile,
    getFilefromURL: getFilefromURL,
    saveFileFromUrl: saveFileFromUrl
}

 /**
  * main  demo entry point: copies a sample PNG from GitLab into the bucket.
  * Replace the placeholder credentials before running.
  */
 async function main() {
     try {
         var s3_creds = {
             "accessKeyId": "acessid",
             "endpoint": "xxxx.digitaloceanspaces.com",
             "secretAccessKey": "Vttkia0....."
         };
         await saveFileFromUrl(s3_creds, "https://gitlab.com/qtree/erpcloud_logos/-/raw/master/pdf_logo2.png?inline=true", 'media/pddd.png');
     } catch (err) {
         // The original `catch {}` silently swallowed every failure,
         // making upload errors impossible to diagnose.
         console.error('saveFileFromUrl failed:', err);
     }
 }

main();

Update s3_creds to fit your credentials, then run it to upload the PDF logo.

Regards, Enrique

Virgenvirgie answered 30/4, 2021 at 5:42 Comment(0)

© 2022 - 2024 — McMap. All rights reserved.