Open Joe-Palmer opened 5 years ago
It currently does not support VP9. It would probably make the library considerably bigger. I’ll take a look at some point (it also depends on browser support).
I had a first stab at this and managed to get it running: https://github.com/Joe-Palmer/webm-wasm/commit/6598e3a7e6462e5f2757f0d117d38c9e67e412f5
The encoder gives me data from live.html but it does not play in the video as it throws this exception:
live.html:85 Uncaught DOMException: Failed to execute 'appendBuffer' on 'SourceBuffer': This SourceBuffer has been removed from the parent media source.
at Worker.worker.onmessage.ev (http://localhost:8080/demo/live.html:85:22)
I'm going to analyze the data to see if it is valid video data but if you have any ideas as to why this exception occurs, let me know.
Regarding the size, the .wasm file roughly doubles (874 KB) when including the VP9 encoder which is a trade-off I am happy with for the improved video quality. Maybe it should be an option for the user to choose VP8, VP9 or both when compiling.
Oh great stuff! Who would have thought it’s that easy :D
My hunch is indeed that the SourceBuffer gets removed because the data is invalid. I don’t remember if there’s an onerror
event or something for MSE. What happens when you don’t use the live
demo but the simple
one instead?
Maybe it should be an option for the user to choose VP8, VP9 or both when compiling.
Yeah either that or we compile them into separate .wasm modules.
@Joe-Palmer Have you fixed the MediaSource
issue?
@Joe-Palmer One issue is that the first message from Worker
appears to be the string "READY"
, not an ArrayBuffer
.
I haven't had a chance to work on this since but do plan to at some point soon. Thanks for the tip 👍
@Joe-Palmer Two other issues are 1) controller.enqueue()
does not return a Promise
; 2) appending buffers to sourceBuffer
while the data is still being parsed. updateend
event of SourceBuffer
can be utilized to wait until the previous buffer is parsed before appending another buffer.
@surma Is postMessage()
ever executed in Worker
thread after "READY"
is posted as event.data
from the Worker
thread?
@surma At
.then(function(e) {
return r.postMessage("READY"),
Promise.resolve(a(r)).then(function(n) {
var t = Object.assign({}, i, n);
"kLive"in t || (t.kLive = t.realtime);
var a = new e.WebmEncoder(t.timebaseNum,t.timebaseDen,t.width,t.height,t.bitrate,t.realtime,t.kLive,function(e) {
var n = new Uint8Array(e);
r.postMessage(n.buffer, [n.buffer])
}
);
!function(e, n) {
if ("on"in e)
return e.on("message", n);
e.addEventListener("message", function(e) {
return n(e.data)
})
}(r, function(e) {
if (!e)
return a.finalize(),
r.postMessage(null),
void a.delete();
a.addRGBAFrame(e)
})
})
@Joe-Palmer A single message
event appears to be dispatched with realtime:true,kLive:true
set, not at intervals. <video>
with MediaSource
set does not play the buffer. A <video>
using Blob
and URL.createObjectURL()
does play the video.
function test() {
  const framerate = 30;
  // NOTE(review): `bitrate` is declared but never sent to the worker — pass it
  // in the config message below if the encoder should honor it. TODO confirm.
  const bitrate = 200;
  const width = 400;
  const height = 400;
  var worker;

  // Wraps a buffer in a Blob and returns an object URL pointing at it.
  function createBufferURL(buffer, type = '') {
    return URL.createObjectURL(new Blob([buffer], {
      type
    }));
  }

  // Returns a ReadableStream of ImageData frames captured from the camera at
  // roughly `framerate` fps, closing itself after 200 frames.
  function cameraStream({
    width, height
  }) {
    const cvs = document.createElement("canvas");
    const video = document.createElement("video");
    document.body.appendChild(video);
    [cvs.width, cvs.height] = [width, height];
    const ctx = cvs.getContext("2d");
    const frameTimeout = 1000 / framerate;
    let n = 0;
    return new ReadableStream({
      async start(controller) {
        const stream = await navigator.mediaDevices.getUserMedia({
          video: true,
          audio: false
        });
        video.srcObject = stream;
        await nextEvent(video, "canplay");
        return video.play();
      },
      async pull(controller) {
        await new Promise(r => setTimeout(r, frameTimeout));
        if (++n >= 200) {
          controller.close();
          // `null` tells the worker to finalize the webm stream.
          worker.postMessage(null);
          // FIX: must return here — enqueue() on a closed controller throws
          // a TypeError; the original fell through and kept enqueueing.
          return;
        }
        ctx.drawImage(video, 0, 0, width, height);
        controller.enqueue(
          ctx.getImageData(0, 0, width, height)
        );
      }
    });
  }

  // Returns a Promise for the next <name> event of `target`.
  function nextEvent(target, name) {
    return new Promise(resolve => {
      target.addEventListener(name, resolve, {
        once: true
      });
    });
  }

  async function init() {
    const workerBuffer = await fetch("https://unpkg.com/webm-wasm@0.4.1/dist/webm-worker.js").then(r => r.arrayBuffer());
    worker = new Worker(createBufferURL(workerBuffer, "text/javascript"));
    worker.postMessage("https://unpkg.com/webm-wasm@0.4.1/dist/webm-wasm.wasm");
    worker.postMessage({
      width,
      height,
      realtime: true,
      kLive: true
    });
    // The first worker message is the string "READY" — consume it here so the
    // encoder-output handler installed below only ever sees ArrayBuffers
    // (appending the string to the SourceBuffer was one failure mode).
    console.log(await nextEvent(worker, "message"));

    cameraStream({
      width, height
    })
      .pipeTo(new WritableStream({
        write(image) {
          console.log('writing data at ' + performance.now());
          worker.postMessage(image.data.buffer, [image.data.buffer]);
        }
      })).catch(e => { throw e });

    const mediaSource = new MediaSource();
    mediaSource.onsourceopen = e => {
      console.log(e);
      const sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs=vp8');
      // FIX: appendBuffer() throws "This SourceBuffer has been removed from
      // the parent media source" / InvalidStateError when called while a
      // previous append is still being parsed (sourceBuffer.updating === true).
      // Queue incoming chunks and drain exactly one per `updateend`.
      const queue = [];
      let ended = false;
      const drain = () => {
        if (sourceBuffer.updating || mediaSource.readyState !== "open") return;
        if (queue.length > 0) {
          sourceBuffer.appendBuffer(queue.shift());
        } else if (ended) {
          mediaSource.endOfStream();
        }
      };
      sourceBuffer.addEventListener("updateend", drain);
      worker.addEventListener("message", ev => {
        console.log(ev);
        if (!ev.data) {
          // FIX: the worker posts `null` after finalizing; the original
          // handler called ev.data.slice(0) here and threw a TypeError.
          ended = true;
          drain();
          return;
        }
        // Also expose each chunk as a standalone Blob-backed <video> for
        // side-by-side comparison with the MediaSource playback.
        const copy = ev.data.slice(0);
        const v = document.createElement("video");
        v.src = URL.createObjectURL(new Blob([copy], {type:"video/webm"}));
        v.controls = true;
        document.body.appendChild(v);
        queue.push(new Uint8Array(ev.data));
        drain();
      });
    };

    const video = document.createElement("video");
    video.width = width;
    video.height = height;
    video.muted = true;
    video.controls = true;
    video.src = URL.createObjectURL(mediaSource);
    document.body.appendChild(video);
    video.addEventListener("canplay", e => {
      console.log(e, e.target);
    });
    video.addEventListener("loadedmetadata", e => {
      console.log(e, e.target);
    });
  }

  init().catch(e => {console.error(e); console.trace();});
}
// Add a clickable "Go" trigger so the demo starts from a user gesture
// (getUserMedia and media playback generally require one).
const go = document.createElement("div");
go.textContent = "Go";
document.body.appendChild(go);
go.onclick = ev => {
  console.log(ev);
  test();
};
Nice work! I have added your code to https://github.com/Joe-Palmer/webm-wasm/commit/dacbb85faa68270dc295d84bbe36be273c39c9be
My video was not encoded correctly, but this may just be the incorrect video resolution. I will investigate further in the coming week...
Oh great stuff! Who would have thought it’s that easy :D
@surma It wasn't! :)
I have learned more about WASM and have a good debug environment setup so I'm now in a position to focus on getting this working. Simply changing vpx_codec_iface_t* iface = vpx_codec_vp8_cx();
to vpx_codec_iface_t* iface = vpx_codec_vp9_cx();
is causing the following errors:
This is where I feel having a better understanding of the VP8/VP9 inner workings would be really helpful.
Does anyone have any ideas why these errors might be happening? Or any suggestions on things to try to find out more?
Thanks in advance for any help you can provide.
Good news! I have managed to make the required changes to successfully encode VP9 😄
https://github.com/Joe-Palmer/webm-wasm/pull/1/files
This obviously hardcodes it to VP9 so VP8 no longer works. I assume you would want to support both so I haven't created a PR to this repo but happy to if that is helpful.
Great work! I’ll get back to this soon, I am currently swamped in some other efforts.
If you fancy, feel free to turn this into a PR with some sort of switch to either load the VP8 wasm or the VP9 wasm :D
Is it possible to support VP9 encoding with this great library?