phoboslab / jsmpeg

MPEG1 Video Decoder in JavaScript
MIT License

When I integrate your code with the FastAPI library, this error occurs on the client. Can you help me? Thank you <3 #383

Open hovanvydut opened 2 years ago

hovanvydut commented 2 years ago

[Screenshot: client-side error showing that `quantMatrix` is undefined]

hovanvydut commented 2 years ago

I don't know why the variable quantMatrix in your lib is undefined.

phoboslab commented 2 years ago

The quantization matrix has a default value that can be overwritten by the MPEG file. I assume your file/stream is invalid.

Test case please!
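
One way to produce such a test case without the camera is to encode ffmpeg's built-in test pattern with the same kind of output flags as the streaming code below and dump a couple of megabytes to a file; if that file decodes in jsmpeg but the camera stream does not, the problem is on the capture side. This is only a minimal sketch: the testsrc input, the capture_testsrc.mpg name and the 2 MB cutoff are arbitrary choices.

# Sketch: generate a small MPEG-1 sample from ffmpeg's test pattern so it can be
# attached as a test case or compared against the failing camera stream
from subprocess import Popen, PIPE

CAPTURE_LIMIT = 2 * 1024 * 1024  # arbitrary ~2 MB cutoff

converter = Popen(
    [
        "ffmpeg",
        "-f", "lavfi", "-i", "testsrc=size=640x480:rate=24",  # synthetic test input
        "-f", "mpeg1video", "-b:v", "800k", "-r", "24",        # MPEG-1 video to stdout
        "-",
    ],
    stdout=PIPE,
)

with open("capture_testsrc.mpg", "wb") as out:
    written = 0
    while written < CAPTURE_LIMIT:
        chunk = converter.stdout.read(32768)
        if not chunk:
            break
        out.write(chunk)
        written += len(chunk)

converter.kill()   # stop the endless test-pattern encode
converter.wait()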

hovanvydut commented 2 years ago

This is my code. I have done the same thing as your sample, but it's not working :((. Thank you so much <3

import os
from fastapi import APIRouter, WebSocket
from fastapi.responses import HTMLResponse
from subprocess import Popen, PIPE
from struct import Struct
import picamera
import asyncio
import io

router = APIRouter()

# CONFIG
WIDTH = 640
HEIGHT = 480
FRAMERATE = 24
VFLIP = False
HFLIP = False
# jsmpeg socket header: 4 magic bytes followed by big-endian width and height
# (defined here but not sent anywhere below)
JSMPEG_MAGIC = b"jsmp"
JSMPEG_HEADER = Struct(">4sHH")

html = """
<!DOCTYPE html>
<html>
<head>
    <meta name="viewport" content="width=640, initial-scale=1"/>
    <title>jsmpeg streaming</title>
    <style type="text/css">
        body {
            background: #333;
            text-align: center;
            margin-top: 10%;
        }
        #videoCanvas {
            /* Always stretch the canvas to 640x480, regardless of its
            internal size. */
            width: 640px;
            height: 480px;
        }
    </style>
    <script type="text/javascript" src="/static/jsmpg.js"></script>
</head>
<body>
    <!-- The Canvas size specified here is the "initial" internal resolution. jsmpeg will
        change this internal resolution to whatever the source provides. The size the
        canvas is displayed on the website is dictated by the CSS style.
    -->
    <canvas id="videoCanvas" width="640" height="480">
        <p>
            Please use a browser that supports the Canvas Element, like
            <a href="http://www.google.com/chrome">Chrome</a>,
            <a href="http://www.mozilla.com/firefox/">Firefox</a>,
            <a href="http://www.apple.com/safari/">Safari</a> or Internet Explorer 10
        </p>
    </canvas>
    <script type="text/javascript">
        // Show loading notice
        var canvas = document.getElementById('videoCanvas');
        var ctx = canvas.getContext('2d');
        ctx.fillStyle = '#444';
        ctx.fillText('Loading...', canvas.width/2-30, canvas.height/3);

        // Setup the WebSocket connection and start the player
        var client = new WebSocket('ws://' + window.location.hostname + ':4040/camera2');
        console.log(client)
        var player = new jsmpeg(client, {canvas:canvas});
    </script>
</body>
</html>
"""

class BroadcastOutput(object):
    """Feeds raw YUV420 frames from the camera into ffmpeg and exposes the
    resulting MPEG-1 video stream on the converter's stdout."""

    def __init__(self, camera):
        print("Spawning background conversion process")
        self.converter = Popen(
            [
                "ffmpeg",
                "-f",
                "rawvideo",
                "-pix_fmt",
                "yuv420p",
                "-s",
                "%dx%d" % camera.resolution,
                "-r",
                str(float(camera.framerate)),
                "-i",
                "-",
                "-f",
                "mpeg1video",
                "-b",
                "800k",
                "-r",
                str(float(camera.framerate)),
                "-",
            ],
            stdin=PIPE,
            stdout=PIPE,
            stderr=io.open(os.devnull, "wb"),
            shell=False,
            close_fds=True,
        )

    def write(self, b):
        self.converter.stdin.write(b)

    def flush(self):
        print("Waiting for background conversion process to exit")
        self.converter.stdin.close()
        self.converter.wait()

@router.get("/camera2")
async def camera_view():
    return HTMLResponse(html)

@router.websocket("/camera2")
async def stream_camera2(websocket: WebSocket):
    await websocket.accept()
    print("Initializing camera")
    with picamera.PiCamera() as camera:
        camera.resolution = (WIDTH, HEIGHT)
        camera.framerate = FRAMERATE
        camera.vflip = VFLIP  # flips the image right-side up, as needed
        camera.hflip = HFLIP  # flips image left-right, as needed
        await asyncio.sleep(1)

        output = BroadcastOutput(camera)

        print("Starting recording")
        camera.start_recording(output, format="yuv")

        try:
            while True:
                buf = output.converter.stdout.read1(32768)
                if buf:
                    await websocket.send_bytes(
                        buf
                    )
                elif output.converter.poll() is not None:
                    break

        finally:
            output.converter.stdout.close()
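
For reference: the JSMPEG_MAGIC and JSMPEG_HEADER constants above are defined but never sent. The pistreaming example this code closely resembles sends that packed header (magic bytes plus big-endian width and height) as the first WebSocket message so the socket-based jsmpg.js client can initialise itself before any MPEG data arrives. Below is a minimal sketch of that variant, assuming /static/jsmpg.js is that socket-based player; the /camera2-hdr route name and the capture.mpg tee (to produce the requested test case) are illustrative only.

@router.websocket("/camera2-hdr")  # illustrative route name, not from the thread
async def stream_camera2_with_header(websocket: WebSocket):
    await websocket.accept()
    # First message: jsmpeg socket header (magic bytes + big-endian width/height),
    # assuming the client expects it before the MPEG-1 stream
    await websocket.send_bytes(JSMPEG_HEADER.pack(JSMPEG_MAGIC, WIDTH, HEIGHT))

    with picamera.PiCamera() as camera:
        camera.resolution = (WIDTH, HEIGHT)
        camera.framerate = FRAMERATE
        await asyncio.sleep(1)

        output = BroadcastOutput(camera)
        camera.start_recording(output, format="yuv")

        loop = asyncio.get_running_loop()
        try:
            # Tee the outgoing bytes to a file so a failing stream can be
            # attached to the issue as the requested test case
            with open("capture.mpg", "wb") as capture:
                while True:
                    buf = await loop.run_in_executor(
                        None, output.converter.stdout.read1, 32768
                    )
                    if buf:
                        capture.write(buf)
                        await websocket.send_bytes(buf)
                    elif output.converter.poll() is not None:
                        break
        finally:
            camera.stop_recording()
            output.converter.stdout.close()
            output.flush()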