ibelem / webnnmeeting

[Deprecated] Please visit https://github.com/ibelem/hybrid-work for the up-to-date version of the W3C WebNN API work.
9 stars 3 forks source link

FPS drops when integrating the refactored BaseRunner.js #2

Closed ibelem closed 4 years ago

ibelem commented 4 years ago
| Full Screen | Before Refactor: Inference Time (ms) | Before Refactor: FPS | After Refactor: Inference Time (ms) | After Refactor: FPS |
| --- | --- | --- | --- | --- |
| DeepLab 224 | 52.72 | 16 | 48.18 | 12 |
| DeepLab 257 Atrous | 45.92 | 17 | 41.75 | 12 |

Also reproduced with WebML SS Examples.

ibelem commented 4 years ago

assets/js/webnn/util/BaseRunner.js

class BaseRunner {
    // Excerpt quoted in the issue: the audio branch was disabled so every
    // source is routed through the image/video tensor path.
    // NOTE(review): as quoted, this statement sits directly in the class
    // body; presumably it belongs inside an async method of BaseRunner in
    // the full file — confirm against assets/js/webnn/util/BaseRunner.js.
    // if (src.tagName === 'AUDIO') {
    //   tensorArray = await getTensorArrayByAudio(src, options);
    // } else {
    tensorArray = getTensorArray(src, options);
    // }
}

/**
 * Runner specialisation for semantic-segmentation models.
 * Adds label handling on top of BaseRunner: the label list is fetched from
 * the model's labels file and attached to every inference output.
 */
class SemanticSegmentationRunner extends BaseRunner {
  constructor() {
    super();
    // Populated by _getOtherResources; null until the labels file loads.
    this._labels = null;
  }

  // Store the parsed label list. Arrow class fields keep `this` bound even
  // when the function is handed around as a bare callback.
  _setLabels = (labels) => {
    this._labels = labels;
  };

  // Download the labels file and split it into one label per line.
  _getLabels = async (url) => {
    const text = await this._loadURL(url);
    this._setLabels(text.split('\n'));
    console.log(`labels: ${this._labels}`);
  };

  // Presumably invoked by BaseRunner alongside model loading — confirm.
  _getOtherResources = async () => {
    await this._getLabels(this._currentModelInfo.labelsFile);
  };

  // Segmentation output tensors are read back as 32-bit integers.
  _getOutputTensorTypedArray = () => Int32Array;

  // Attach the label list so consumers can map class ids to names.
  _updateOutput = (output) => {
    output.labels = this._labels;
  };
}

export { BaseRunner, SemanticSegmentationRunner }
ibelem commented 4 years ago

config.js

// config.js excerpt: semantic-segmentation model registration.
// NOTE(review): truncated in the quote — the object's closing braces are
// not shown here.
semanticsegmentation: {
    modelName: 'Deeplab 257 Atrous (TFLite)',
    format: 'TFLite',
    modelId: 'deeplab_mobilenet_v2_257_atrous_tflite',
    modelSize: '8.4MB',
    modelFile: '../../js/webnn/ss/model/deeplab_mobilenetv2_257_dilated.tflite',
    labelsFile: '../../js/webnn/ss/model/labels.txt',
    isQuantized: false, // float model (not uint8-quantized)
    inputSize: [257, 257, 3], // presumably [height, width, channels] — confirm
    outputSize: [257, 257, 1], // presumably a per-pixel class-id map — confirm
    preOptions: {
      // presumably normalisation as (pixel - mean) / std — confirm in runner
      mean: [127.5, 127.5, 127.5],
      std: [127.5, 127.5, 127.5]
    }

// (duplicated line in the quoted diff, highlighting the isQuantized flag)
isQuantized: false,

ibelem commented 4 years ago

id.js

    getClippedSize(source) {
      const width = config.semanticsegmentation.inputSize[0]
      const imWidth = source.naturalWidth || source.videoWidth
      const imHeight = source.naturalHeight || source.videoHeight
      const resizeRatio = Math.max(Math.max(imWidth, imHeight) / width, 1)
      const scaledWidth = Math.floor(imWidth / resizeRatio)
      const scaledHeight = Math.floor(imHeight / resizeRatio)
      return [scaledWidth, scaledHeight]
    },
    getSegMap() {
      const output = this.runner.getOutput()
      const segMap = {
        data: output.outputTensor,
        outputShape: config.semanticsegmentation.outputSize,
        labels: output.labels
      }
      return segMap
    },
    customOutput(source) {
      this.renderer.uploadNewTexture(source, this.getClippedSize(source))
      this.renderer.drawOutputs(this.getSegMap())
    },
    output(source) {
      const output = this.runner.getOutput()
      this.inferencetime = output.inferenceTime.toFixed(2)
      this.showfps = fps
      console.log(
        `Inference time: ${this.inferencetime} ms / FPS: ${this.showfps}`
      )
    },
    async predict(source) {
      // const inputSize = config.semanticsegmentation.inputSize
      const drawOptions = {
        inputSize: config.semanticsegmentation.inputSize,
        preOptions: config.semanticsegmentation.preOptions || {},
        imageChannels: 4,
        scaledFlag: true
      }
      // const ret = await this.runner.run(source, drawOptions)
      // return ret
      await this.runner.run(source, drawOptions)
    },
    /**
     * Inference/render loop over the local video element: run inference on
     * the current frame, log stats, redraw, then schedule the next
     * iteration via setTimeout(…, 0) so the loop runs as fast as inference
     * allows.
     */
    async predictFrame() {
      const source = this.$refs.localvideo
      await this.predict(source)
      // Logs inference time/FPS; output() ignores its (unused) argument.
      this.output()
      this.renderer.uploadNewTexture(source, this.getClippedSize(source))
      // stats brackets only the draw call, not inference or the upload.
      this.stats.begin()
      this.renderer.drawOutputs(this.getSegMap())
      this.stats.end()
      // Keep the timer handle so the loop can be cancelled via clearTimeout.
      this.sstimer = setTimeout(this.predictFrame, 0)
    },
   /**
    * Start semantic segmentation with the given renderer effect, then stop
    * publishing the raw local stream. predictFrame() is not awaited: its
    * promise settles after the first frame, since later frames reschedule
    * themselves via setTimeout.
    * NOTE(review): declared async but contains no await — confirm whether
    * callers rely on the returned promise.
    */
   async ss(effect) {
      this.initRenderer(effect)
      // Fire-and-forget: the render loop sustains itself from here on.
      this.predictFrame()
      console.log('delete stream')
      deleteStream(this.roomId, this.localPublication.id)
    }
ibelem commented 4 years ago

.gitignore

*.tflite
ibelem commented 4 years ago

Update static/js/webnn/util/ to latest

├── base.js
├── decoders
│   ├── SsdDecoder.js
│   └── Yolo2Decoder.js
├── mfcc
│   ├── mfcc.js
│   └── mfcc.wasm
├── onnx
│   ├── onnx.js
│   ├── OnnxModelImporter.js
│   └── OnnxModelUtils.js
├── openvino
│   ├── openvino.js
│   ├── OpenVINOModelImporter.js
│   └── OpenVINOModelUtils.js
└── tflite
    ├── flatbuffers.js
    ├── schema
    │   ├── README.md
    │   └── schema_generated.js
    ├── TFliteModelImporter.js
    └── TfLiteModelUtils.js
ibelem commented 4 years ago

Fixed in https://github.com/intel/webml-polyfill/pull/1196/commits/30040e0ea62bbddfff1524e71127d2ec95ca8cde , https://github.com/intel/webml-polyfill/pull/1196/commits/97983cc346b56c0daae2e7acb595bee87b3c11b3 and https://github.com/ibelem/webnnmeeting/commit/70db597894a2e2d15f21f9c02b9bfda0dcc52594

ibelem commented 4 years ago

Root cause @https://github.com/intel/webml-polyfill/issues/1197