TTLabs / EvaporateJS

JavaScript library for browser-to-S3 multipart resumable uploads

[feature request] Max retries and timeout for upload (add) #451

Open shay8el opened 4 years ago

shay8el commented 4 years ago

Timeout

If you want to put a time limit on your client's upload, you can use Promise.race (thanks to https://italonascimento.github.io/applying-a-timeout-to-your-promises/). You just need to add:

export default function promiseTimeout (ms, promise) {

  // Create a promise that rejects in <ms> milliseconds
  let timeout = new Promise((resolve, reject) => {
    let id = setTimeout(() => {
      clearTimeout(id);
      reject('Timed out in '+ ms + 'ms.')
    }, ms)
  })

  // Returns a race between our timeout and the passed in promise
  return Promise.race([
    promise,
    timeout
  ])
}

and then use it like this:

promiseTimeout(5000, evaporate.add(fileConfig))
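
Note that when the timeout wins the race, only the wrapper promise rejects; the underlying upload keeps running. Here is a minimal sketch that also cancels the pending upload on timeout (the uploadWithTimeout helper and the bucket argument are assumptions for illustration; the cancel key follows the bucket + '/' + name pattern used in the uploader class further down):

async function uploadWithTimeout (evaporate, fileConfig, bucket, ms) {
  try {
    // race the add() promise against the timeout
    return await promiseTimeout(ms, evaporate.add(fileConfig))
  } catch (e) {
    // stop the in-flight upload so its parts don't keep retrying in the background;
    // ignore cancel errors (the upload may already be finished or gone)
    await evaporate.cancel(bucket + '/' + fileConfig.name).catch(() => {})
    throw e
  }
}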

Max retries

tl;dr: use the warn callback with a retries counter.

// assumes evaporate, file, filename and bucket are already in scope
const MAX_RETRIES = 5  // pick a limit that fits your use case

function warnCallback (errorMessage, retries, bucket, filename) {
  retries++
  console.warn(`Evaporate warning after ${retries} tries`, errorMessage)
  if (retries > MAX_RETRIES) {
    // cancel keys are '<bucket>/<object name>'
    evaporate.cancel(bucket + '/' + filename)
  }
  return retries
}

let retries = 0
const fileConfig = {
  file: file,
  name: filename,
  warn: (errorMessage) => { retries = warnCallback(errorMessage, retries, bucket, filename) }
}
evaporate.add(fileConfig)
  .then((awsObjectKey) => console.log(`${awsObjectKey} uploaded after ${retries} retries`))

In some cases we might prefer not to race our uploads against a timeout (for example, when the failure has nothing to do with timing), but we still don't want to retry forever. We can limit the retries by wrapping the uploader like this:

import Evaporate from 'evaporate'

const MAX_RETRIES = 5  // pick a limit that fits your use case

class S3Uploader {
  constructor () {
    this.uploaderConfig = { /* your Evaporate config, including bucket */ }
  }
  async createUploader () {
    if (!this.uploader) {
      this.uploader = await Evaporate.create(this.uploaderConfig)
    }
  }
  async uploadPhoto (file, filename) {
    const bucket = this.uploaderConfig.bucket
    let retries = 0
    const fileConfig = {
      file: file,
      name: filename,
      warn: (errorMessage) => { retries = this._warnCallback(errorMessage, retries, bucket, filename) },
      complete: (xhr, awsObjectKey) => this._completeCallback(awsObjectKey, retries)
    }
    const awsObjectKey = await this.uploader.add(fileConfig)
    return awsObjectKey
  }
  async cancelUpload (bucket, filename) {
    const cancelKey = bucket + '/' + filename
    try {
      await this.uploader.cancel(cancelKey)
      console.log(`${cancelKey} Canceled!`)
    } catch (e) {
      console.log(`failed to cancel upload:`, e)
    }
  }
  _warnCallback (errorMessage, retries, bucket, filename) {
    retries++
    console.warn(`Evaporate warning after ${retries} tries`, errorMessage)
    if (retries > MAX_RETRIES) {
      this.cancelUpload(bucket, filename)
    }
    return retries
  }
  _completeCallback (awsObjectKey, retries) {
    if (retries > 0) {
      console.log(`Evaporate complete upload ${awsObjectKey} after ${retries} tries`)
    }
  }
}

export default S3Uploader

then use it like this:

s3Uploader.uploadPhoto(file, filename)
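
For completeness, a minimal end-to-end sketch (the file and filename variables, and running inside an async function, are assumptions for illustration):

const s3Uploader = new S3Uploader()
await s3Uploader.createUploader()               // build the Evaporate instance once
const awsObjectKey = await s3Uploader.uploadPhoto(file, filename)
console.log(`upload finished: ${awsObjectKey}`)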