Closed kenchangh closed 8 years ago
Your problem is that you keep reassigning worker.onmessage
, which is only safe if there can't be multiple requests outstanding.
In other words, if you set worker.onmessage
multiple times, you won't necessarily end up calling the right callback for each. Your logging is correct in showing which requests are being made. videostream
asks for a stream starting at the very beginning of the file, and then quickly changes its mind and asks for some data slightly later (24 bytes in). Ideally I'd optimize this to just keep reading from the same stream for this file, but mp4 files that actually require a much bigger jump aren't uncommon.
However, MultiStream
keeps requesting more chunks of the file based on the initial offset, so multiple requests are in flight at once, causing the wrong callback to be called.
Also, note that you probably just want stream.PassThrough
instead of creating a whole WorkerStream
type.
Here's code that works, for index.js:
// Third-party modules (bundled for the browser, e.g. via browserify/webpack).
var MultiStream = require('multistream')
var stream = require('stream');
var inherits = require('inherits');
var videostream = require('videostream');
var Buffer = require('buffer/').Buffer;
// Single shared worker that performs the ranged XHRs off the main thread.
var worker = new Worker('worker.js');
var MB = 1000000;
// Each request to the worker asks for at most 2 MB of the file.
var REQUEST_SIZE = 2*MB;
// Minimal file abstraction for videostream: it only needs a `path`
// property plus the createReadStream method added on the prototype below.
var file = function(path) {
  this.path = path;
};
// Monotonically increasing id so each outstanding chunk request can be
// matched to its callback when the worker replies.
var nextRequestId = 0
// Maps request ids to the appropriate callback.
// Bug fix: `cbs` was assigned without a declaration, creating an implicit
// global (and a ReferenceError in strict mode / ES modules); declare it.
var cbs = {}
// only set onmessage once
// Single onmessage handler for ALL requests. The worker echoes back the
// request id, which is used to look up (and consume) the matching
// callback — this is what prevents overlapping requests from invoking
// the wrong callback.
worker.onmessage = function(e) {
  var cb = cbs[e.data.id];
  // Bug fix: guard against a stale or duplicate reply whose callback was
  // already consumed; calling `undefined` would throw inside the handler.
  if (!cb) return;
  delete cbs[e.data.id];
  var fileSize = e.data.fileSize;
  // Buffer.from replaces the deprecated `new Buffer(...)` constructor.
  var buf = Buffer.from(e.data.chunk);
  // A PassThrough preloaded with the chunk is all MultiStream needs —
  // no custom WorkerStream type required.
  var workerStream = new stream.PassThrough();
  workerStream.push(buf);
  workerStream.push(null);
  cb(null, {
    size: fileSize,
    stream: workerStream
  });
}
// Returns a Readable covering [opts.start, opts.end] (inclusive, like
// fs.createReadStream) by lazily chaining fixed-size chunk requests to
// the worker through MultiStream.
file.prototype.createReadStream = function(opts) {
  var self = this;
  opts = opts || {};
  var start = opts.start || 0;
  // Unknown until the first worker reply reports the Content-Range total.
  var fileSize = -1;
  var multi = new MultiStream(function(cb) {
    // Bug fix: use a null check rather than truthiness so opts.end === 0
    // (a valid one-byte range) is not mistaken for "no end given".
    var end = opts.end != null ? (opts.end + 1) : fileSize;
    var reqStart = start;
    var reqEnd = start + REQUEST_SIZE;
    if (end >= 0 && reqEnd > end) {
      reqEnd = end;
    }
    if (reqStart >= reqEnd) {
      // Tell MultiStream there are no more streams.
      return cb(null, null);
    }
    console.log(reqStart, reqEnd)
    var payload = {
      path: self.path,
      start: reqStart,
      end: reqEnd,
      id: nextRequestId++,
    };
    // Register the callback BEFORE posting so the reply can always find it.
    cbs[payload.id] = function (err, obj) {
      if (err) return cb(err)
      fileSize = obj.size
      cb(null, obj.stream)
    }
    worker.postMessage(payload);
    // For the next request
    start = reqEnd;
  });
  var destroy = multi.destroy
  multi.destroy = function() {
    destroy.call(multi);
  };
  return multi;
}
// Wire everything up: surface <video> element errors in the console and
// start streaming the sample file through the worker-backed `file` type.
var videoElement = document.querySelector('video');
videoElement.addEventListener('error', function() {
  console.error(videoElement.error);
});
videostream(new file('../the-social-network.mp4'), videoElement);
and here is the corresponding worker.js:
// Worker side: fetch one byte range of `path` via XHR and post the chunk
// back together with the request id and the total file size (parsed from
// the Content-Range response header).
self.onmessage = function(e) {
  var opts = e.data;
  var {start, end, path, id} = opts;
  var xhr = new XMLHttpRequest();
  xhr.onreadystatechange = function() {
    if (xhr.readyState == XMLHttpRequest.DONE) {
      var chunk = xhr.response;
      // Bug fix: getResponseHeader returns null when the server ignored
      // the Range request (e.g. a plain 200 with no Content-Range);
      // calling split on null would throw inside the worker and silently
      // stall the stream. Report -1 ("size unknown") instead, matching
      // the main thread's initial fileSize value.
      var contentRange = xhr.getResponseHeader('Content-Range');
      var fileSize = contentRange ? parseInt(contentRange.split('/')[1], 10) : -1;
      postMessage({ fileSize, chunk, id });
    }
  }
  // `end` is exclusive in the request payload; HTTP Range ends are
  // inclusive, hence end - 1.
  var range = 'bytes=' + start + '-' + (end-1);
  xhr.open("GET", path);
  xhr.setRequestHeader('Range', range);
  xhr.responseType = 'arraybuffer';
  xhr.send();
};
Actually, I still see errors occasionally even with my fixed code. Let me take a bit more of a look.
Nevermind, it looks like the problem is specific to the (different) test video I was using, so it shouldn't affect you.
I have trouble running the code below with videostream. It's essentially what you did in
example/
but moving out the XHR to a web worker. In my code, videostream only goes through 3 streams and then stops completely.
This is index.js:
worker.js