Is there a way to upload to S3 from a url using node.js?

I found this question, but it doesn't seem to answer my question as I think it's still talking about local files.

I want to take, say, an imgur.com link and upload it to S3 using node. Is knox capable of this or do I need to use something else?

Not sure where to get started.

Blank answered 28/5, 2013 at 23:44 Comment(1)
Many answers indicate that the "accepted" answer might not be the correct one; this might need updating and upvoting of the other relevant answers. There is, of course, no obligation to do so, though.Evelineevelinn
33

I’m not using knox but the official AWS SDK for JavaScript in Node.js. I issue a request to the URL with {encoding: null} in order to retrieve the data as a buffer, which can be passed directly to the Body parameter of s3.putObject(). Below is an example of putting a remote image in a bucket with aws-sdk and request.

var AWS = require('aws-sdk');
var request = require('request');

AWS.config.loadFromPath('./config.json');
var s3 = new AWS.S3();

function put_from_url(url, bucket, key, callback) {
    request({
        url: url,
        encoding: null
    }, function(err, res, body) {
        if (err)
            return callback(err, res);

        s3.putObject({
            Bucket: bucket,
            Key: key,
            ContentType: res.headers['content-type'],
            ContentLength: res.headers['content-length'],
            Body: body // buffer
        }, callback);
    });
}

put_from_url('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'your_bucket', 'media/aws_logo.png', function(err, res) {
    if (err)
        throw err;

    console.log('Uploaded data successfully!');
});
Capel answered 29/8, 2014 at 8:45 Comment(5)
Exactly the same approach I took. My image files get corrupted on S3 :(Pitchblende
+1 for the example. My approach was also similar to this but I was having corrupt upload issues. This code seems to be working for me though.Certes
If you use axios, set responseType: 'stream' in the request options.Marje
This worked for me. To avoid the corrupt image issue, I had to ensure that encoding: null was set in the request options.Crescent
@Alexander, your comment really helped me a lot! Thank you!Sporophyll
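Picking up the axios tip in the comments above, here is a minimal sketch of the streaming variant, assuming axios and the SDK's s3.upload() (which, unlike putObject(), accepts a stream of unknown length); the function name is illustrative:

const AWS = require('aws-sdk');
const axios = require('axios');

const s3 = new AWS.S3();

// Download as a stream and hand it straight to the managed uploader,
// so the file is never buffered in memory.
async function streamUrlToS3(url, bucket, key) {
    const response = await axios.get(url, { responseType: 'stream' });

    return s3.upload({
        Bucket: bucket,
        Key: key,
        ContentType: response.headers['content-type'],
        Body: response.data // a readable stream
    }).promise();
}
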
14

For those who are looking for a solution that doesn't involve callbacks and prefer promises, here is an alternative based on @micmia's code:

var AWS = require('aws-sdk'),
    request = require('request');

const bucketName = 'yourBucketName';
const bucketOptions = {...Your options};
var s3 = new AWS.S3(bucketOptions);

function UploadFromUrlToS3(url, destPath) {
    return new Promise((resolve, reject) => {
        request({
            url: url,
            encoding: null
        }, function(err, res, body) {
            if (err) {
                return reject(err);
            }
            var objectParams = {
                Bucket: bucketName,
                ContentType: res.headers['content-type'],
                ContentLength: res.headers['content-length'],
                Key: destPath,
                Body: body
            };
            resolve(s3.putObject(objectParams).promise());
        });
    });
}

UploadFromUrlToS3(
    'http://a0.awsstatic.com/main/images/logos/aws_logo.png',
    'your/s3/path/aws_logo.png' )
    .then(function() {
        console.log('image was saved...');
    }).catch(function(err) {
        console.log('image was not saved!',err);
    });
Parang answered 20/8, 2018 at 18:46 Comment(0)
10

Building on @Yuri's post, here is a version for those who would like to use axios instead of request, with ES6 syntax for a more modern approach and with the required Bucket property in the params (it also downloads any file, not only images):

const AWS = require("aws-sdk");
const axios = require("axios");

const s3 = new AWS.S3();

const uploadFileToS3 = (url, bucket, key) => {
  return axios.get(url, { responseType: "arraybuffer", responseEncoding: "binary" }).then((response) => {
    const params = {
      ContentType: response.headers["content-type"],
      ContentLength: response.data.length.toString(), // or response.headers["content-length"] if available for the type of file downloaded
      Bucket: bucket,
      Body: response.data,
      Key: key,
    };
    return s3.putObject(params).promise();
  });
};

uploadFileToS3(<your_file_url>, <your_s3_bucket>, <your_s3_path>)
   .then(() => console.log("File saved!"))
   .catch((error) => console.log(error));
Evelineevelinn answered 18/5, 2020 at 12:4 Comment(0)
3

Same thing as the above answer but with fetch:

import { S3 } from "aws-sdk";

const s3 = new S3();

async function upload(url: string, key: string, bucket: string) {
  const response = await fetch(url);
  const contentType = response.headers.get("content-type") ?? undefined;
  const contentLength =
    response.headers.get("content-length") != null
      ? Number(response.headers.get("content-length"))
      : undefined;

  return s3
    .putObject({
      Bucket: bucket,
      Key: key,
      ContentType: contentType,
      ContentLength: contentLength,
      Body: Buffer.from(await response.arrayBuffer()), // read the body into a Buffer
    })
    .promise();
}
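A usage example, reusing the sample image from the earlier answers (the bucket and key names are illustrative):

upload('http://a0.awsstatic.com/main/images/logos/aws_logo.png', 'media/aws_logo.png', 'your_bucket')
    .then(() => console.log('File saved!'))
    .catch((err) => console.error(err));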
Distinguish answered 2/2, 2021 at 0:35 Comment(0)
-1

Yes. There's an example of doing this in the knox README:

var http = require('http');
var knox = require('knox');

// client comes from knox.createClient({ key, secret, bucket })
var client = knox.createClient({ key: '<api-key>', secret: '<secret>', bucket: 'your_bucket' });

http.get('http://google.com/doodle.png', function(res){
  var headers = {
      'Content-Length': res.headers['content-length']
    , 'Content-Type': res.headers['content-type']
  };
  client.putStream(res, '/doodle.png', headers, function(err, res){
    // Logic
  });
});
Buntline answered 29/5, 2013 at 3:21 Comment(1)
There's no reason to be sarcastic and rude.Sherr
