node-formidable / formidable

The most used, flexible, fast and streaming parser for multipart form data. Supports uploading to serverless environments, AWS S3, Azure, GCP or the filesystem. Used in production.
MIT License
7k stars 680 forks source link

Encrypting a stream received via Formidable.js before streaming to S3 fails #939

Closed boriskogan81 closed 1 year ago

boriskogan81 commented 1 year ago

Community

Yes.

No.

Context

v19.4.0

Current

2.0.1

Node

Express, AWS-SDK for S3.

What are you trying to achieve or the steps to reproduce?

I have a file parsing function in Express.js which is taking multiple files coming in via Formidable and streaming them to S3, without storing them on the server:

/**
 * Parse a multipart request with formidable and stream each uploaded file
 * straight to S3 (nothing is stored on the server's disk).
 *
 * @param {import('http').IncomingMessage} req - incoming Express request
 * @returns {Promise<object>} merged formidable `fields` and `files`; each
 *   file object gains a `location` property holding its S3 URL
 */
const parseFile = (req) => {
    // `new Promise` is the right tool here: form.parse is callback-based.
    // (The outer `async` keyword was redundant and has been dropped —
    // callers still receive a Promise, so the interface is unchanged.)
    return new Promise((resolve, reject) => {
        const s3Uploads = [];

        // IMPORTANT: this handler must NOT be `async`. Formidable expects
        // it to synchronously return a writable stream; an async function
        // returns a Promise instead, which produces
        // "TypeError: this._writeStream.on is not a function".
        const fileWriteStreamHandler = (file) => {
            // Writable side handed to formidable; the SDK reads it as Body.
            const body = new PassThrough();
            const upload = new Upload({
                client,
                params: {
                    Key: `files/${Date.now().toString()}-${file.originalFilename}`,
                    ContentType: file.mimetype,
                    Bucket: s3Config.bucket,
                    Body: body,
                    Region: s3Config.region,
                },
                queueSize: 4,
                partSize: 1024 * 1024 * 5, // 5 MiB multipart chunks
                leavePartsOnError: false,
            });
            const uploadRequest = upload.done()
                .then((response) => {
                    // Record where the object landed so the caller sees it.
                    file.location = response.Location;
                })
                .catch((error) => {
                    console.log(error.message);
                    // First failing upload rejects the whole parse; later
                    // reject() calls on a settled promise are no-ops.
                    reject(error);
                });
            s3Uploads.push(uploadRequest);
            return body;
        };

        const form = formidable({
            multiples: true,
            fileWriteStreamHandler,
        });

        form.parse(req, (error, fields, files) => {
            if (error) {
                reject(error);
                return;
            }
            // Resolve only once every S3 multipart upload has completed.
            Promise.all(s3Uploads)
                .then(() => {
                    resolve({ ...fields, ...files });
                })
                .catch((error) => {
                    console.log(error.message);
                    reject(error);
                });
        });
    });
};

I would like to encrypt the stream, so that the file gets to AWS already encrypted.

This seems like a decent library for the job: https://www.npmjs.com/package/aes-encrypt-stream

The issue is that I'm having a hard time understanding where to hook this functionality into the flow. Is it in the fileWriteStreamHandler function? How can I access the stream Formidable is getting from the request in order to pipe it into the body PassThrough stream? Or is it something I need to pass as a callback to form.on for a certain event? I have multiple files, thus multiple streams, each of which must be encrypted separately.

This, in the fileWriteStreamHandler function:

// NOTE(review): if this runs inside an *async* fileWriteStreamHandler,
// formidable receives a Promise rather than a stream — the likely trigger
// of the "this._writeStream.on is not a function" error below; confirm.
setPassword(Buffer.from('f8647d5417039b42c88a75897109049378cdfce528a7e015656bd23cd18fb78a', 'hex'));
// `stream` is the writable input to the cipher; createEncryptStream(stream)
// yields the encrypted READABLE side, piped into `body` (the S3 Body).
const stream = new PassThrough();
createEncryptStream(stream).pipe(body);
// NOTE(review): returning `body` means formidable writes PLAINTEXT straight
// into the S3 Body and `stream` never receives any data, so nothing is
// encrypted — `return stream;` appears to be what was intended.
return body; 

Results in an error: `TypeError: this._writeStream.on is not a function`, thrown from `VolatileFile.open` (C:\Users...\node_modules\formidable\src\VolatileFile.js:27:23), called via `IncomingForm._handlePart`.

What was the result you got?

See above.

What result did you expect?

File encryption and streaming to S3.

GrosSacASac commented 1 year ago

`fileWriteStreamHandler` should return a writable stream; the `createEncryptStream` function returns a readable stream.

GrosSacASac commented 1 year ago

Have a look at this working example

import http from 'node:http';
import path from "node:path";
import fs from "node:fs";
import url from "node:url";
import { Writable, PassThrough } from 'node:stream';
import formidable from '../src/index.js';
import { createEncryptStream, createDecryptStream, setPassword } from 'aes-encrypt-stream';

// Recreate the CommonJS-style __filename/__dirname pair for this ES module.
const __filename = url.fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Demo AES key (32 bytes, hex-encoded); shared by every encrypt/decrypt
// stream created in this file.
const aesKey = Buffer.from(
  'f8647d5417039b42c88a75897109049378cdfce528a7e015656bd23cd18fb78a',
  'hex',
);
setPassword(aesKey);

/**
 * Demo HTTP server. POST /api/upload parses a multipart upload with
 * formidable, encrypts each file stream with AES, and persists the
 * ciphertext under ../uploads. Any other request serves an upload form.
 */
const server = http.createServer((req, res) => {
  if (req.url === '/api/upload' && req.method.toLowerCase() === 'post') {
    // parse a file upload
    const form = formidable({
      // Must return a WRITABLE stream: formidable pipes the raw upload
      // bytes into it. The encrypted READABLE side is what you would hand
      // to S3 as the upload Body.
      fileWriteStreamHandler: (file) => {
        const passThrough = new PassThrough();
        const readable = createEncryptStream(passThrough); // pass this for s3 as the body
        // Persist ciphertext locally for this demo. The previous
        // `|| new Writable()` fallback was dead code — createWriteStream
        // always returns a (truthy) stream object — so it was removed.
        const writable = fs.createWriteStream(file.filepath);
        readable.pipe(writable); // ignore for s3
        return passThrough;
      },
      uploadDir: `${__dirname}/../uploads`,
    });

    form.parse(req, (err, fields, files) => {
      if (err) {
        console.error(err);
        res.writeHead(err.httpCode || 400, { 'Content-Type': 'text/plain' });
        res.end(String(err));
        return;
      }
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ fields, files }, null, 2));
    });

    return;
  }

  // show a file upload form
  res.writeHead(200, { 'Content-Type': 'text/html' });
  res.end(`
    <h2>With Node.js <code>"http"</code> module</h2>
    <form action="/api/upload" enctype="multipart/form-data" method="post">
      <div>Text field title: <input type="text" name="title" /></div>
      <div>File: <input type="file" name="file" multiple></div>
      <input type="submit" value="Upload" />
    </form>
  `);
});

// Start accepting connections on port 3000.
server.listen(3000, function onListening() {
  console.log('Server listening on http://localhost:3000 ...');
});

// const writable = Writable();
//     // eslint-disable-next-line no-underscore-dangle
//   writable._write = (chunk, enc, next) => {
//     console.log(chunk.toString());
//     next();
// };
// const readStream = fs.createReadStream('./uploads/e9520a89cdce29115e7d21a00');
// readStream.pipe(createDecryptStream(writable));