Upload files to Amazon S3 with Dropzone.js issue

I'm trying to upload files to the S3 service using Dropzone.js.

I use this tutorial to upload the files directly from the client:

https://devcenter.heroku.com/articles/s3-upload-node - this tutorial doesn't cover the Dropzone.js integration (which was a nightmare)

The flow is pretty simple:

  1. Ask my server for a signature from Amazon
  2. Get the signed request URL + the expected file URL from Amazon
  3. Override dropzone.options.url with the signed request URL
  4. Call dropzone.processFile to upload the file to S3
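
In code, that flow might look roughly like the sketch below (the /sign-s3 endpoint and the { signedRequest } response shape are assumptions for illustration, not part of the tutorial):

function uploadWithSignedUrl(dropzone, file) {
  // Steps 1 and 2: ask the server to sign a PUT URL for this file
  fetch('/sign-s3?fileName=' + encodeURIComponent(file.name) +
        '&fileType=' + encodeURIComponent(file.type))
    .then(function (res) { return res.json(); })
    .then(function (data) {
      // Step 3: point Dropzone at the signed URL
      dropzone.options.url = data.signedRequest;
      // Step 4: hand the file to Dropzone for the actual upload
      dropzone.processFile(file);
    });
}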

The file is uploaded and everything is OK up to this point, but when I try to view the file (in the S3 bucket interface) it seems the file was not written correctly and I can't view it.

According to the source code, the file is uploaded using a FormData object:

Dropzone.prototype.submitRequest = function(xhr, formData, files) {
  return xhr.send(formData);
}

If I change the source code from:

xhr.send(formData)

to

xhr.send(files[0])

Everything works great, but I lose the ability to upload multiple files.

This is the dropzone config:

{
   url: 'http://signature_url',
   accept: _dropzoneAcceptCallback,
   method: 'put',
   headers: {
      'x-amz-acl': 'public-read',
      'Accept': '*/*',
      'Content-Type': file.type
   },
   clickable: ['.choose-files'],
   autoProcessQueue: false
}

(Screenshot of the request HTTP headers omitted.)

Hope it's enough :)

Thanks.

Polychasium answered 30/12, 2015 at 9:41 Comment(0)

For anyone who has also landed on this question, I'd like to share my working example as well. Note that I went a step further by dropping my own backend and using AWS Lambda (i.e. serverless) to do the signing job instead; the concept is the same, though.

The architecture

(Architecture diagram omitted.)

So, basically,

  1. You're signing a PUT-uploadable URL, so you MUST hijack the xhr.send function, as you already mentioned.
  2. Instead of relying on Dropzone's FormData to upload multiple files, you can call processFile inside the accept function. That way the upload starts immediately for each accepted file, and you can upload multiple files simultaneously.

The final client-side code

const vm = this

let options = {
  // The URL will be changed for each new file being processed
  url: '/',

  // Since we're going to do a `PUT` upload to S3 directly
  method: 'put',

  // Hijack xhr.send since Dropzone always uploads files using FormData
  // ref: https://github.com/danialfarid/ng-file-upload/issues/743
  sending (file, xhr) {
    let _send = xhr.send
    xhr.send = () => {
      _send.call(xhr, file)
    }
  },

  // Upload one file at a time since we're using the S3 pre-signed URL scenario
  parallelUploads: 1,
  uploadMultiple: false,

  // Content-Type must match the one used to sign the URL, otherwise you'll
  // get a signature mismatch error from S3. Since we send the raw File in
  // `sending` above, the browser sets Content-Type from the file's type.
  // (Note: Dropzone's real option is `headers`; `header: ''` is a no-op.)
  header: '',

  // We're going to process each file manually (see `accept` below)
  autoProcessQueue: false,

  // Here we request a signed upload URL when a file is accepted
  accept (file, done) {
    lambda.getSignedURL(file)
      .then((url) => {
        file.uploadURL = url
        done()
        // Manually process each file
        setTimeout(() => vm.dropzone.processFile(file))
      })
      .catch((err) => {
        done('Failed to get an S3 signed upload URL', err)
      })
  }
}

// Instantiate Dropzone
this.dropzone = new Dropzone(this.$el, options)

// Set signed upload URL for each file
vm.dropzone.on('processing', (file) => {
  vm.dropzone.options.url = file.uploadURL
})
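
Setting the URL in the processing event works because Dropzone reads options.url right before it starts the actual upload, so each file gets the pre-signed URL that was stored on it in accept. With parallelUploads: 1 the files go out one at a time, which keeps the mutable options.url from being overwritten mid-upload.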

The code above contains a few Vue.js-specific bits, but the concept is framework-agnostic; you get the idea. For a full working Dropzone component example, please have a look at my GitHub repo.

Demo

(Demo GIF omitted.)

Strongwilled answered 28/11, 2016 at 6:10 Comment(0)

Here's what worked for me: the Dropzone init parameters on the frontend and the Node.js S3 signature on the backend.

Frontend code using Dropzone:

var myDropzone = new Dropzone(dropArea, { 
    url:"#",
    dictDefaultMessage: "Drag n drop or tap here",
    method: "PUT",
    uploadMultiple: false,
    paramName: "file",
    maxFiles: 10,
    thumbnailWidth: 80,
    thumbnailHeight: 80,
    parallelUploads: 20,
    autoProcessQueue: true,
    previewTemplate: dropPreviewTemplate,
    //autoQueue: false, // Make sure the files aren't queued until manually added
    previewsContainer: dropPreviewContainer, // Define the container to display the previews
    clickable: true, // or e.g. ".fileinput-button" to define the element used as the click trigger to select files
    accept: function(file, cb) {
        //override the file name, to use the s3 signature
        //console.log(file);
        var params = {
          fileName: file.name,
          fileType: file.type,
        };

        //path to S3 signature 
        $.getJSON('/uploader', params).done(function(data) {
            //console.log(data);

          if (!data.signedRequest) {
            return cb('Failed to receive an upload url');
          }

          file.signedRequest = data.signedRequest;
          file.finalURL = data.downloadURL;
          cb();
        }).fail(function() {
          return cb('Failed to receive an upload url');
        });
    },
    sending: function(file, xhr) {

        console.log('sending')
        var _send = xhr.send;
        xhr.setRequestHeader('x-amz-acl', 'public-read');
        xhr.send = function() {
            _send.call(xhr, file);
        }

    },
    processing: function(file) {
        this.options.url = file.signedRequest;
    }
});

Here are the libraries I used on the Node.js side:

var Crypto = require("crypto"),
    AWS = require("aws-sdk");

Here's a sample of the CORS config on S3:

<?xml version="1.0" encoding="UTF-8"?>
<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<CORSRule>
    <AllowedOrigin>*</AllowedOrigin>
    <AllowedMethod>PUT</AllowedMethod>
    <AllowedHeader>*</AllowedHeader>
</CORSRule>
</CORSConfiguration>
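
Newer versions of the S3 console accept the CORS configuration as JSON rather than XML; an equivalent of the rule above would look roughly like this:

[
    {
        "AllowedOrigins": ["*"],
        "AllowedMethods": ["PUT"],
        "AllowedHeaders": ["*"]
    }
]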

Here's the code to generate the S3 signature in Node.js:

getPolicy: function(req, res)
{
    // Generate a random file name so uploads can't collide
    var fileId = Crypto.randomBytes(20).toString('hex').toUpperCase();

    var prefix = "bl_";
    var newFileName = prefix + fileId; // instead of req.query.fileName

    var s3 = new AWS.S3();
    var s3_params = {
        Bucket: BUCKET,
        Key: newFileName,
        Expires: 60,
        ContentType: req.query.fileType,
        ACL: 'public-read'
    };
    s3.getSignedUrl('putObject', s3_params, function(err, data) {
        if (err) {
            console.log(err);
            res.end(); // don't leave the request hanging on error
        }
        else {
            var return_data = {
                signedRequest: data,
                uploadURL: 'https://' + BUCKET + '.s3.amazonaws.com/' + newFileName,
                downloadURL: 'http://' + BUCKET + '.s3-website-us-east-1.amazonaws.com/' + newFileName
            };
            res.write(JSON.stringify(return_data));
            res.end();
        }
    });
}
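
The answer doesn't show how this handler is mounted, so here is a minimal sketch assuming Express (the framework isn't specified); the /uploader path matches the frontend's $.getJSON call, and BUCKET is a placeholder for your bucket name:

var express = require('express');
var AWS = require('aws-sdk');

var BUCKET = 'my-bucket'; // placeholder; use your bucket name
AWS.config.update({ region: 'us-east-1' }); // credentials come from env vars or an IAM role

var app = express();

// The frontend's $.getJSON('/uploader', params) hits this route;
// getPolicy is the signing function shown above
app.get('/uploader', getPolicy);

app.listen(3000);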

Hopefully some of this is helpful.

Crossquestion answered 20/1, 2016 at 7:10 Comment(3)
The dropzone frontend configuration in particular worked for us. Thanks! – Hungarian
Thanks, the var _send = xhr.send; xhr.send = function() { _send.call(xhr, file); } did the trick. Any ideas why? – Likable
Not sure; if I had to guess, something in the function gets reset when setting the header with setRequestHeader, so I had to save the function and call it again. – Crossquestion

There are two separate items that must be dealt with to upload to S3: authentication and uploading.

Auth

Some possibilities, in order of security:

  1. Make your folder public (either via bucket policy or ACL; a sample policy is sketched after this list).
  2. Create a role in IAM that has your preferred limits, and use its keys.
  3. Use STS to issue temporary credentials, either authenticating yourself or using Federation
  4. Generate a pre-signed upload link for every file.
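
For option 1, a bucket policy allowing public uploads to a prefix might look like the sketch below (bucket name and prefix are placeholders; note this is the least secure option, since anyone can write to that prefix):

{
    "Version": "2012-10-17",
    "Statement": [{
        "Sid": "AllowPublicUploads",
        "Effect": "Allow",
        "Principal": "*",
        "Action": "s3:PutObject",
        "Resource": "arn:aws:s3:::my-bucket/uploads/*"
    }]
}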

Generating pre-signed links was demonstrated by Aaron Rau.

Using STS is conceptually simpler (no need to sign each link), but is somewhat less secure (the same temp credentials can be used elsewhere until they expire).

If you use federated auth, you can skip the server side entirely!
Some good tutorials for getting temporary IAM credentials for federated users are here (for FineUploader, but the mechanism is the same) and here.

To generate your own temporary IAM credentials you can use the AWS SDK. An example in PHP:

Server:

<?php
require 'vendor/autoload.php';
use Aws\Result;
use Aws\Sts\StsClient;
$client = new StsClient(['region' => 'us-east-1', 'version' => 'latest']);
$result = $client->getSessionToken();
header('Content-type: application/json');
echo json_encode($result['Credentials']);

Client:

let dropzonesetup = async () => {
    // Fetch the temporary credentials issued by the server above
    let res = await fetch('//example.com/auth.php').catch(console.error)
    let creds = await res.json()

    // If using aws-sdk.js; note the STS response uses capitalized field names
    AWS.config.credentials = new AWS.Credentials({
        accessKeyId: creds.AccessKeyId,
        secretAccessKey: creds.SecretAccessKey,
        sessionToken: creds.SessionToken
    })
}

Uploading

Either use DropZone natively and amend as needed, or have Dropzone be a front for the aws-sdk.

To use the aws-sdk

You need to include it:

<script src="//sdk.amazonaws.com/js/aws-sdk-2.262.1.min.js"></script>

And then update Dropzone to interact with it (based on this tutorial).

let canceled = file => { if (file.s3upload) file.s3upload.abort() }
let options =
    { canceled
    , removedfile: canceled
    , accept (file, done) {
        let params = {Bucket: 'mybucket', Key: file.name, Body: file };
        file.s3upload = new AWS.S3.ManagedUpload({params});
        done();
        }
    }

// let aws-sdk send events to dropzone.
function sendEvents(file) {
    let progress = i => dz.emit('uploadprogress', file, i.loaded * 100 / i.total, i.loaded);
    file.s3upload.on('httpUploadProgress', progress);
    file.s3upload.send(err => err ? dz.emit('error', file, err) : dz.emit('complete', file));
    }

Dropzone.prototype.uploadFiles = files => files.map(sendEvents);
var dz = new Dropzone('#dz', options)
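
Overriding Dropzone.prototype.uploadFiles like this hands the whole network step over to the SDK's ManagedUpload (which also gives you multipart uploads and retries for free), while the emitted uploadprogress, error, and complete events keep Dropzone's UI in sync.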

To use DropZone natively

let options = 
    { method: 'put'

    // Have DZ send raw data instead of formData
    , sending (file, xhr) { 
        let _send = xhr.send
        xhr.send = () => _send.call(xhr, file)
        }

    // For STS, if creds is the result of getSessionToken / getFederatedToken
    , headers: { 'x-amz-security-token': creds.SessionToken }

    // Or, if you are using signed URLs (see other answers)
    , processing (file) { this.options.url = file.uploadURL }
    , async accept (file, done) {
        // fetch resolves to a Response; read the signed URL out as text
        let url = await fetch('https://example.com/auth.php')
            .then(res => res.text())
            .catch(err => done('Failed to get an S3 signed upload URL', err))
        file.uploadURL = url
        done()
        }
    }

The above is untested; I have added just the token, but I'm not sure which headers really need to be added. Check here, here, and here for the docs, and perhaps use FineUploader's implementation as a guide.

Hopefully this will help, and if anyone wants to open a pull request for S3 support (as in FineUploader), I'm sure it will be appreciated.

Stricker answered 27/6, 2018 at 9:26 Comment(1)
The first option set complains about 'No URL provided.' – Fowkes
