MixedReality-WebRTC is a collection of components to help mixed reality app developers integrate audio and video real-time communication into their application and improve their collaborative experience.
I would like to get the remote audio bytes, so to try that quickly, I've updated the TestReceiveAV example to work with 2.0 code, but I'm finding that while AudioTrackAdded is called, AudioFrameReady is not. This is with the latest code on master. Otherwise the example works as intended and I can hear audio from the application (the browser which provides the remote source on the same machine per the example is muted). RemoteAudioTrack.OnFrameReady is not called.
If I roll back to the TestReceiveAV that is in the repo (which relies on the 1.0 nuget still), I find that the older equivalent RemoteAudioFrameReady callback is called. However, this is not really an option for me, as I need another audio device, which when enabled causes the adm() check to fail.
namespace TestReceiveAV
{
/// <summary>
/// One WebRTC signaling session per web socket client. Owns a
/// <see cref="PeerConnection"/> and forwards incoming socket messages to
/// subscribers so the hosting program can drive SDP/ICE negotiation.
/// </summary>
public class WebRtcSession : WebSocketBehavior
{
    /// <summary>
    /// Peer connection for this session. Created eagerly in the constructor so
    /// track-added handlers are attached before any negotiation starts.
    /// </summary>
    public PeerConnection pc { get; private set; }

    /// <summary>Raised for every text message received on the web socket.</summary>
    public event Action<WebRtcSession, string> MessageReceived;

    public WebRtcSession()
    {
        pc = new PeerConnection();

        pc.VideoTrackAdded += (RemoteVideoTrack track) =>
        {
            track.Argb32VideoFrameReady += (Argb32VideoFrame frame) =>
            {
                var width = frame.width;
                var height = frame.height;
                var stride = frame.stride;
                var data = frame.data;
                // BUG FIX: the original created a Bitmap per frame and never
                // disposed it, leaking one GDI handle per video frame (which
                // exhausts GDI resources quickly at video frame rates).
                using (var bmpImage = new System.Drawing.Bitmap((int)width, (int)height, (int)stride, System.Drawing.Imaging.PixelFormat.Format32bppArgb, data))
                {
                    // TODO: hand the bitmap to a consumer (e.g. copy into a
                    // PictureBox) before disposal; currently the frame is
                    // wrapped and immediately discarded.
                }
            };
        };

        pc.AudioTrackAdded += (RemoteAudioTrack track) =>
        {
            // NOTE(review): this handler is hit, but AudioFrameReady below never
            // fires (the reported issue). In MR-WebRTC 2.0 remote audio is output
            // directly to the local playback device; confirm against the library
            // docs whether frame callbacks require additional track configuration.
            track.AudioFrameReady += (AudioFrame frame) =>
            {
                var data = frame.audioData; // never hit
            };
        };
    }

    /// <summary>Forwards an incoming web socket message to subscribers.</summary>
    protected override void OnMessage(MessageEventArgs e)
    {
        // BUG FIX: the original invoked MessageReceived directly, which throws
        // NullReferenceException if a message arrives before any subscriber is
        // attached. Null-conditional invoke is the idiomatic, safe form.
        MessageReceived?.Invoke(this, e.Data);
    }
}
/// <summary>
/// Console host: runs a secure web socket signaling server and answers a
/// browser-initiated WebRTC offer, displaying remote video in a WinForms window.
/// </summary>
class Program
{
    // PFX certificate used for the wss:// endpoint; the browser must trust it.
    private const string WEBSOCKET_CERTIFICATE_PATH = "c:/temp/certs/localhost.pfx";
    private const int WEBSOCKET_PORT = 8081;

    static void Main()
    {
        try
        {
            // Start web socket server.
            Console.WriteLine("Starting web socket server...");
            var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT, true);
            webSocketServer.SslConfiguration.ServerCertificate = new System.Security.Cryptography.X509Certificates.X509Certificate2(WEBSOCKET_CERTIFICATE_PATH);
            webSocketServer.SslConfiguration.CheckCertificateRevocation = false;
            //webSocketServer.Log.Level = WebSocketSharp.LogLevel.Debug;
            webSocketServer.AddWebSocketService<WebRtcSession>("/", (session) =>
            {
                session.MessageReceived += MessageReceived;
            });
            webSocketServer.Start();
            Console.WriteLine($"Waiting for browser web socket connection to {webSocketServer.Address}:{webSocketServer.Port}...");

            // Park the main thread forever; all work happens on socket callbacks.
            ManualResetEvent mre = new ManualResetEvent(false);
            mre.WaitOne();
        }
        catch (Exception e)
        {
            // Print the full exception: the original printed only e.Message,
            // which discards the stack trace and inner exceptions.
            Console.WriteLine(e);
        }
    }

    /// <summary>
    /// Handles a signaling message from the browser: either a remote ICE
    /// candidate ("ice") or the remote SDP offer ("sdp").
    /// </summary>
    /// <remarks>
    /// async void is acceptable here only because this is a top-level event
    /// handler; exceptions thrown inside it are unobservable.
    /// </remarks>
    private static async void MessageReceived(WebRtcSession session, string msg)
    {
        Console.WriteLine($"web socket recv: {msg.Length} bytes");
        JObject jsonMsg = JObject.Parse(msg);
        if ((string)jsonMsg["type"] == "ice")
        {
            Console.WriteLine($"Adding remote ICE candidate {msg}.");
            while (!session.pc.Initialized)
            {
                // This delay is needed due to an initialise bug in the Microsoft.MixedReality.WebRTC
                // nuget packages up to version 0.2.3. On master awaiting pc.InitializeAsync does end
                // up with the pc object being ready.
                Console.WriteLine("Sleeping for 1s while peer connection is initialising...");
                await Task.Delay(1000);
            }
            IceCandidate ic = new IceCandidate()
            {
                SdpMid = (string)jsonMsg["sdpMid"],
                SdpMlineIndex = (int)jsonMsg["sdpMLineindex"],
                Content = (string)jsonMsg["candidate"]
            };
            session.pc.AddIceCandidate(ic);
        }
        else if ((string)jsonMsg["type"] == "sdp")
        {
            Console.WriteLine("Received remote peer SDP offer.");
            var config = new PeerConnectionConfiguration();

            // Wire up outbound signaling before initializing the connection so
            // no candidate or answer can be produced without a handler attached.
            session.pc.IceCandidateReadytoSend += (IceCandidate icecand) =>
            {
                Console.WriteLine($"Sending ice candidate: {icecand.Content}");
                JObject iceCandidate = new JObject {
                    { "type", "ice" },
                    { "candidate", icecand.Content },
                    { "sdpMLineindex", icecand.SdpMlineIndex },
                    { "sdpMid", icecand.SdpMid}
                };
                session.Context.WebSocket.Send(iceCandidate.ToString());
            };
            session.pc.IceStateChanged += (newState) =>
            {
                Console.WriteLine($"ice connection state changed to {newState}.");
            };
            session.pc.LocalSdpReadytoSend += (SdpMessage message) =>
            {
                Console.WriteLine($"SDP answer ready, sending to remote peer.");
                // Send our SDP answer to the remote peer.
                JObject sdpAnswer = new JObject {
                    { "type", "sdp" },
                    { "answer", message.Content }
                };
                session.Context.WebSocket.Send(sdpAnswer.ToString());
            };

            // BUG FIX: the original ran SetRemoteDescriptionAsync inside a
            // ContinueWith WITHOUT awaiting the returned Task, so CreateAnswer()
            // could run before the remote offer was actually applied — a race
            // that can leave negotiated media (e.g. audio frame delivery) in an
            // inconsistent state. Await each step sequentially instead.
            await session.pc.InitializeAsync(config);
            var sdpmsg = new SdpMessage()
            {
                Type = SdpMessageType.Offer,
                Content = (string)jsonMsg["offer"]
            };
            await session.pc.SetRemoteDescriptionAsync(sdpmsg);
            if (!session.pc.CreateAnswer())
            {
                Console.WriteLine("Failed to create peer connection answer, closing peer connection.");
                session.pc.Close();
                session.Context.WebSocket.Close();
            }

            // Create a new form to display the video feed from the WebRTC peer.
            // (Removed the unused local `PictureBox picBox = null;` from the
            // original — it was declared and never read.)
            var form = new Form();
            form.AutoSize = true;
            form.BackgroundImageLayout = ImageLayout.Center;
            form.HandleDestroyed += (object sender, EventArgs e) =>
            {
                Console.WriteLine("Form closed, closing peer connection.");
                session.pc.Close();
                session.Context.WebSocket.Close();
            };
            Application.EnableVisualStyles();
            Application.Run(form);
        }
    }
}
}
I would like to get the remote audio bytes, so to try that quickly, I've updated the TestReceiveAV example to work with 2.0 code, but I'm finding that while AudioTrackAdded is called, AudioFrameReady is not. This is with the latest code on master. Otherwise the example works as intended and I can hear audio from the application (the browser which provides the remote source on the same machine per the example is muted). RemoteAudioTrack.OnFrameReady is not called.
If I roll back to the TestReceiveAV that is in the repo (which relies on the 1.0 nuget still), I find that the older equivalent RemoteAudioFrameReady callback is called. However, this is not really an option for me, as I need another audio device, which when enabled causes the adm() check to fail.
namespace TestReceiveAV {
}