microsoft / MixedReality-WebRTC

MixedReality-WebRTC is a collection of components to help mixed reality app developers integrate audio and video real-time communication into their application and improve their collaborative experience.
https://microsoft.github.io/MixedReality-WebRTC/
MIT License

TestReceiveAV example with version 2.0.2 #671

Open · martingra opened this issue 3 years ago

martingra commented 3 years ago

I am trying to run the TestReceiveAV example using version 2.0.2 of MixedReality-WebRTC, available via NuGet.

I changed the code as follows (the original code is commented out):


using System;
using System.Drawing;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.MixedReality.WebRTC;
using Newtonsoft.Json.Linq;
using WebSocketSharp;
using WebSocketSharp.Server;

namespace TestReceiveAV
{
    public class WebRtcSession : WebSocketBehavior
    {
        public PeerConnection pc { get; private set; }

        public event Action<WebRtcSession, string> MessageReceived;

        public WebRtcSession()
        {
            pc = new PeerConnection();
        }

        protected override void OnMessage(MessageEventArgs e)
        {
            MessageReceived(this, e.Data);
        }
    }

    class Program
    {
        private const string WEBSOCKET_CERTIFICATE_PATH = "H:/SmartInMedia/ffmpeg/MixedReality-WebRTC-master/examples/TestReceiveAV/localhost.pfx";
        private const int WEBSOCKET_PORT = 8081;

        static void Main()
        {
            try
            {
                // Start web socket server.
                Console.WriteLine("Starting web socket server...");
                var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
                webSocketServer.SslConfiguration.ServerCertificate = new System.Security.Cryptography.X509Certificates.X509Certificate2(WEBSOCKET_CERTIFICATE_PATH);
                webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
                webSocketServer.AddWebSocketService<WebRtcSession>("/", (session) =>
                {
                    session.MessageReceived += MessageReceived;
                });
                webSocketServer.Start();

                Console.WriteLine($"Waiting for browser web socket connection to {webSocketServer.Address}:{webSocketServer.Port}...");

                ManualResetEvent mre = new ManualResetEvent(false);
                mre.WaitOne();
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }

        private static async void MessageReceived(WebRtcSession session, string msg)
        {
            Console.WriteLine($"web socket recv: {msg.Length} bytes");

            JObject jsonMsg = JObject.Parse(msg);

            if ((string)jsonMsg["type"] == "ice")
            {
                Console.WriteLine($"Adding remote ICE candidate {msg}.");

                while (!session.pc.Initialized)
                {
                    // This delay is needed due to an initialise bug in the Microsoft.MixedReality.WebRTC
                    // nuget packages up to version 0.2.3. On master awaiting pc.InitializeAsync does end 
                    // up with the pc object being ready.
                    Console.WriteLine("Sleeping for 1s while peer connection is initialising...");
                    await Task.Delay(1000);
                }

                //session.pc.AddIceCandidate((string)jsonMsg["sdpMLineindex"], (int)jsonMsg["sdpMid"], (string)jsonMsg["candidate"]);
                IceCandidate ic = new IceCandidate()
                {
                    SdpMid = (string)jsonMsg["sdpMid"],
                    SdpMlineIndex = (int)jsonMsg["sdpMLineindex"],
                    Content = (string)jsonMsg["candidate"]
                };
                session.pc.AddIceCandidate(ic);
            }
            else if ((string)jsonMsg["type"] == "sdp")
            {
                Console.WriteLine("Received remote peer SDP offer.");

                var config = new PeerConnectionConfiguration();

                //session.pc.IceCandidateReadytoSend += (string candidate, int sdpMlineindex, string sdpMid) =>
                //{
                //    Console.WriteLine($"Sending ice candidate: {candidate}");
                //    JObject iceCandidate = new JObject {
                //        { "type", "ice" },
                //        { "candidate", candidate },
                //        { "sdpMLineindex", sdpMlineindex },
                //        { "sdpMid", sdpMid}
                //    };
                //    session.Context.WebSocket.Send(iceCandidate.ToString());
                //};

                session.pc.IceCandidateReadytoSend += (IceCandidate icecand) =>
                {
                    Console.WriteLine($"Sending ice candidate: {icecand.Content}");
                    JObject iceCandidate = new JObject {
                        { "type", "ice" },
                        { "candidate", icecand.Content },
                        { "sdpMLineindex", icecand.SdpMlineIndex },
                        { "sdpMid", icecand.SdpMid}
                    };
                    session.Context.WebSocket.Send(iceCandidate.ToString());
                };

                session.pc.IceStateChanged += (newState) =>
                {
                    Console.WriteLine($"ice connection state changed to {newState}.");
                };

                //session.pc.LocalSdpReadytoSend += (string type, string sdp) =>
                //{
                //    Console.WriteLine($"SDP answer ready, sending to remote peer.");

                //    // Send our SDP answer to the remote peer.
                //    JObject sdpAnswer = new JObject {
                //        { "type", "sdp" },
                //        { "answer", sdp }
                //    };
                //    session.Context.WebSocket.Send(sdpAnswer.ToString());
                //};

                session.pc.LocalSdpReadytoSend += (SdpMessage message) =>
                {
                    Console.WriteLine($"SDP answer ready, sending to remote peer.");

                    // Send our SDP answer to the remote peer.
                    JObject sdpAnswer = new JObject {
                        { "type", "sdp" },
                        { "answer", message.Content }
                    };
                    session.Context.WebSocket.Send(sdpAnswer.ToString());
                };

                await session.pc.InitializeAsync(config).ContinueWith((t) =>
                {
                    var sdpmsg = new SdpMessage()
                    {
                        Type = SdpMessageType.Offer,
                        Content = (string)jsonMsg["offer"]
                    };

                    session.pc.SetRemoteDescriptionAsync(sdpmsg);

                    //session.pc.SetRemoteDescription((string)jsonMsg["offer"]);

                    if (!session.pc.CreateAnswer())
                    {
                        Console.WriteLine("Failed to create peer connection answer, closing peer connection.");
                        session.pc.Close();
                        session.Context.WebSocket.Close();
                    }
                });

                // Create a new form to display the video feed from the WebRTC peer.
                var form = new Form();
                form.AutoSize = true;
                form.BackgroundImageLayout = ImageLayout.Center;
                PictureBox picBox = null;

                form.HandleDestroyed += (object sender, EventArgs e) =>
                {
                    Console.WriteLine("Form closed, closing peer connection.");
                    session.pc.Close();
                    session.Context.WebSocket.Close();
                };

                session.pc.VideoTrackAdded += (RemoteVideoTrack track) =>
                {
                    track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
                    {
                        var width = frame.width;
                        var height = frame.height;
                        var stride = frame.stride;
                        var data = frame.data;

                        if (picBox == null)
                        {
                            picBox = new PictureBox
                            {
                                Size = new Size((int)width, (int)height),
                                Location = new Point(0, 0),
                                Visible = true
                            };
                            form.BeginInvoke(new Action(() => { form.Controls.Add(picBox); }));
                        }

                        form.BeginInvoke(new Action(() =>
                        {
                            System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)stride, System.Drawing.Imaging.PixelFormat.Format32bppArgb, data);
                            picBox.Image = bmpImage;
                        }));
                    };
                };

                //session.pc.ARGBRemoteVideoFrameReady += (frame) =>
                //{
                //    var width = frame.width;
                //    var height = frame.height;
                //    var stride = frame.stride;
                //    var data = frame.data;

                //    if (picBox == null)
                //    {
                //        picBox = new PictureBox
                //        {
                //            Size = new Size((int)width, (int)height),
                //            Location = new Point(0, 0),
                //            Visible = true
                //        };
                //        form.BeginInvoke(new Action(() => { form.Controls.Add(picBox); }));
                //    }

                //    form.BeginInvoke(new Action(() =>
                //    {
                //        System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)stride, System.Drawing.Imaging.PixelFormat.Format32bppArgb, data);
                //        picBox.Image = bmpImage;
                //    }));
                //};

                Application.EnableVisualStyles();
                Application.Run(form);
            }
        }
    }
}

I am getting an empty form, and session.pc.VideoTrackAdded is never called.

Is that the correct way to capture the received frames from the web client?

martingra commented 3 years ago

Solved by changing the WebRtcSession class so that the VideoTrackAdded handler is registered in the constructor, before the remote offer is processed:

    public class WebRtcSession : WebSocketBehavior
    {
        public PeerConnection pc { get; private set; }

        public event Action<WebRtcSession, string> MessageReceived;

        public WebRtcSession()
        {
            pc = new PeerConnection();

            pc.VideoTrackAdded += (RemoteVideoTrack track) =>
            {
                track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
                {
                    var width = frame.width;
                    var height = frame.height;
                    var stride = frame.stride;
                    var data = frame.data;

                    System.Drawing.Bitmap bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)stride, System.Drawing.Imaging.PixelFormat.Format32bppArgb, data);
                };
            };
        }

        protected override void OnMessage(MessageEventArgs e)
        {
            MessageReceived(this, e.Data);
        }
    }

I think it would be helpful to include this updated example, which works with MixedReality-WebRTC version 2.0.2, in the repository.
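
For reference, the same ordering can also be applied directly in Program.MessageReceived instead of the constructor: the handler just needs to be attached before the remote offer is applied, since a handler subscribed after the track has already been added never fires. A minimal, untested sketch reusing session, config and jsonMsg from the code above, and awaiting SetRemoteDescriptionAsync before creating the answer:

    // Sketch only: subscribe before initializing the peer connection and
    // applying the remote offer, so the remote video track is not missed.
    session.pc.VideoTrackAdded += (RemoteVideoTrack track) =>
    {
        track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
        {
            Console.WriteLine($"Received remote frame {frame.width}x{frame.height}");
        };
    };

    await session.pc.InitializeAsync(config);

    await session.pc.SetRemoteDescriptionAsync(new SdpMessage
    {
        Type = SdpMessageType.Offer,
        Content = (string)jsonMsg["offer"]
    });

    if (!session.pc.CreateAnswer())
    {
        Console.WriteLine("Failed to create peer connection answer, closing peer connection.");
        session.pc.Close();
        session.Context.WebSocket.Close();
    }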

STL1811 commented 3 years ago

Hello, I tried your changes, but I don't understand how your bitmap is copied into the PictureBox. Do you have any other changes?

martingra commented 3 years ago

Sorry for the delay, I was on vacation. I didn't test rendering to the PictureBox; I was only trying to get the bitmaps.
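
For anyone looking for the missing piece: here is a minimal, untested sketch of one way to push the frame into the PictureBox from inside the VideoTrackAdded handler. It assumes form and picBox are the Form and PictureBox from the first post, and that the buffer behind frame.data is only valid for the duration of the callback, so it is copied into a managed array before switching to the UI thread:

    track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
    {
        int width = (int)frame.width;
        int height = (int)frame.height;
        int stride = (int)frame.stride;

        // Copy the unmanaged frame while the pointer is still valid.
        byte[] buffer = new byte[stride * height];
        System.Runtime.InteropServices.Marshal.Copy(frame.data, buffer, 0, buffer.Length);

        form.BeginInvoke(new Action(() =>
        {
            var bmp = new System.Drawing.Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            var bmpData = bmp.LockBits(
                new System.Drawing.Rectangle(0, 0, width, height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly,
                System.Drawing.Imaging.PixelFormat.Format32bppArgb);

            // Copy row by row in case the Bitmap stride differs from the frame stride.
            for (int y = 0; y < height; y++)
            {
                System.Runtime.InteropServices.Marshal.Copy(buffer, y * stride, bmpData.Scan0 + y * bmpData.Stride, width * 4);
            }
            bmp.UnlockBits(bmpData);

            var previous = picBox.Image;
            picBox.Image = bmp;
            previous?.Dispose(); // avoid leaking one Bitmap per frame
        }));
    };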

STL1811 commented 3 years ago

OK, thanks for the answer.