Unity-Technologies / com.unity.webrtc

WebRTC package for Unity
Other
751 stars 189 forks source link

[Issue]: The unity app crashes once it reaches the lines for accessing the camera in the callJoinedUser Method #684

Closed MohammedShetaya closed 2 years ago

MohammedShetaya commented 2 years ago

Package version

2.4.0-exp.6

Environment

* OS: Windows
* Unity version: 19.4.3
* 
using System.Collections;
using System.Collections.Generic;
using Newtonsoft.Json;
using Unity.WebRTC;
using UnityEngine;
using WebSocketSharp;

namespace Unity.ARVideoStreaming
{
    /// <summary>
    /// WebRTC signaling client: connects to the signaling server over a
    /// websocket, answers "user-joined" by streaming the attached camera to
    /// the remote peer, and exchanges SDP offers/answers and ICE candidates.
    /// </summary>
    public class WSClient : MonoBehaviour
    {
        // Camera on this GameObject whose frames are streamed to the peer.
        private Camera arCamera;

        // Signaling channel to the web server.
        private WebSocket socket;

        private DelegateOnOffer onOffer;
        private DelegateOnAnswer onAnswer;

        private RTCPeerConnection localConnection;
        private DelegateOnNegotiationNeeded onNegotiationNeeded;
        private DelegateOnIceCandidate onIceCandidate;
        private DelegateOnIceConnectionChange onIceConnectionChange;

        // WebSocketSharp raises OnMessage on a background thread, while the
        // Unity API (Camera, RenderTexture, StartCoroutine, ...) may only be
        // used from the main thread -- touching it from the socket thread is
        // what crashed callJoinedUser. Incoming messages are therefore queued
        // here and drained on the main thread in Update().
        private readonly Queue<SignalingMessage> incomingMessages = new Queue<SignalingMessage>();
        private readonly object queueLock = new object();

        private void Awake()
        {
            //connect to the WebServer through websocket
            socket = new WebSocket("ws://ar-video-streaming.herokuapp.com?sender=unity");
            socket.OnMessage += handleIncommingMessages;

            socket.Connect();
            Debug.Log(socket.ReadyState);

            WebRTC.WebRTC.Initialize();

            arCamera = GetComponent<Camera>();
            Debug.Log(arCamera);
        }

        private void Start()
        {
            // Wrap the coroutine handlers in the delegate types that
            // RTCPeerConnection expects. All of these are invoked from the
            // main thread (see Update), so StartCoroutine is safe.
            onOffer = (m) => { StartCoroutine(handleOffer(m)); };
            onAnswer = (m) => { StartCoroutine(handleAnswer(m)); };

            onIceCandidate = (e) => { handleIceCandidate(e); };
            onIceConnectionChange = (e) => { handleIceConnectionChange(e); };
            onNegotiationNeeded = () => { Debug.Log("negotiation needed"); StartCoroutine(handleNegotiationNeeded()); };
        }

        // Drain messages queued by the websocket thread. Update runs on the
        // Unity main thread, so dispatching from here makes every handler
        // safe to call Unity APIs.
        void Update()
        {
            while (true)
            {
                SignalingMessage message;
                lock (queueLock)
                {
                    if (incomingMessages.Count == 0)
                    {
                        break;
                    }
                    message = incomingMessages.Dequeue();
                }
                dispatchMessage(message);
            }
        }

        // Runs on the websocket's background thread: parse and enqueue only,
        // so no Unity API is touched off the main thread.
        public void handleIncommingMessages(object sender, MessageEventArgs e)
        {
            Debug.Log("Got message");
            SignalingMessage message = JsonConvert.DeserializeObject<SignalingMessage>(e.Data);

            lock (queueLock)
            {
                incomingMessages.Enqueue(message);
            }
        }

        // Dispatch a signaling message by type; main thread only.
        private void dispatchMessage(SignalingMessage message)
        {
            switch (message.type)
            {
                case "offer": onOffer(message); break;
                case "answer": onAnswer(message); break;
                case "ice-candidate": handleIceCandidateMessage(message); break;
                case "user-joined": callJoinedUser(); break;
            }
        }

        // A remote user joined: create the local peer connection and attach a
        // video track that captures the camera. Adding the track fires
        // OnNegotiationNeeded, which sends the offer. This is now reached via
        // Update() (main thread) instead of the websocket callback thread.
        private void callJoinedUser()
        {
            //Create local peer
            RTCConfiguration config = default;
            config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
            localConnection = new RTCPeerConnection(ref config);

            localConnection.OnNegotiationNeeded = onNegotiationNeeded;
            localConnection.OnIceCandidate += onIceCandidate;
            localConnection.OnIceConnectionChange = onIceConnectionChange;

            Debug.Log(arCamera.enabled);

            // Render the camera into a texture whose format the encoder supports.
            RenderTextureFormat format = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            RenderTexture rt = new RenderTexture(1270, 720, 0, format);
            rt.Create();

            arCamera.targetTexture = rt;

            localConnection.AddTrack(new VideoStreamTrack(rt));
        }

        // Create an SDP offer, apply it as the local description, and send it
        // to the remote peer through the signaling server.
        private IEnumerator handleNegotiationNeeded()
        {
            Debug.Log("inside negotiation");
            var offer = localConnection.CreateOffer();
            yield return offer;

            var offerDesc = offer.Desc;
            var setLocalDesc = localConnection.SetLocalDescription(ref offerDesc);
            yield return setLocalDesc;

            socket.Send(JsonConvert.SerializeObject(new SignalingMessage
            {
                type = "offer",
                sessionDescription = offerDesc
            }));
        }

        // Forward a locally gathered ICE candidate to the remote peer; an
        // empty candidate string signals end-of-candidates and is not sent.
        private void handleIceCandidate(RTCIceCandidate candidate)
        {
            if (!string.IsNullOrEmpty(candidate.Candidate))
            {
                socket.Send(JsonConvert.SerializeObject(new SignalingMessage
                {
                    type = "ice-candidate",
                    outcommingIceCandidate = candidate
                }));
            }
        }

        private void handleIceConnectionChange(RTCIceConnectionState state)
        {
            Debug.Log("IceCandidate state is " + state.ToString());
        }

        // Apply a remote offer, then create, apply, and send back the answer.
        public IEnumerator handleOffer(SignalingMessage offerMessage)
        {
            var setRemoteDesc = localConnection.SetRemoteDescription(ref offerMessage.sessionDescription);
            yield return setRemoteDesc;

            var answer = localConnection.CreateAnswer();
            yield return answer;

            var answerDesc = answer.Desc;
            var setLocalDesc = localConnection.SetLocalDescription(ref answerDesc);
            yield return setLocalDesc;

            SignalingMessage answerMessage = new SignalingMessage
            {
                type = "answer",
                sessionDescription = answerDesc
            };

            socket.Send(JsonConvert.SerializeObject(answerMessage));

            Debug.Log("offer is handled");
        }

        // Apply the remote peer's answer to complete signaling.
        public IEnumerator handleAnswer(SignalingMessage answerMessage)
        {
            var setRemoteDesc = localConnection.SetRemoteDescription(ref answerMessage.sessionDescription);
            yield return setRemoteDesc;
            Debug.Log("Answer is handled");
        }

        // NOTE(review): candidates that arrive before SetRemoteDescription
        // completes may be rejected by the peer connection -- consider
        // buffering them until the remote description is set.
        public void handleIceCandidateMessage(SignalingMessage candidateMessage)
        {
            RTCIceCandidate iceCandidate = new RTCIceCandidate(candidateMessage.incommingIceCandidate);
            localConnection.AddIceCandidate(iceCandidate);
            Debug.Log("ice candidate msg is handled");
        }
    }

}

Steps To Reproduce

N/A

Current Behavior

The app crashes on line 92 and does not execute the line `RenderTextureFormat format = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);`. It also crashes if I try to access the camera properties.

Expected Behavior

to create a track and send it to the local peer in order to fire the onNegotiationNeeded method

Anything else?

No response

karasusan commented 2 years ago

@MohammedShetaya Could you check the arCamera variable null or not?

MohammedShetaya commented 2 years ago

Using the above script, arCamera is not null: the script is attached to the camera of ARFoundation's AR Session Origin object, and the Awake method calls GetComponent, so it finds the camera object.

The aim of this project is to send the camera feed from Unity and annotate that feed with AR features, so I have a Unity client and a browser client. I have another, clearer implementation that works fine for exchanging the messages between the two peers, and I receive a MediaStream object on the browser peer. The problem is that the video stream does not play in the browser.

The unity script which is attached to the camera: `using System.Collections; using Newtonsoft.Json; using UnityEngine; using Unity.WebRTC; using WebSocketSharp; using System.Collections.Generic; using UnityEngine.Experimental.Rendering;

namespace Unity.ARVideoStreaming { public class WSClient : MonoBehaviour {

    // Handler coroutines queued by the websocket's background thread and
    // started from Update() so all Unity API calls stay on the main thread.
    List<IEnumerator> threadPumpList = new List<IEnumerator>();

    // Camera whose rendered output is captured and streamed to the peer.
    private Camera arCamera;
    // Signaling channel to the web server.
    private WebSocket socket;

    // Local WebRTC peer; created in callJoinedUser().
    private RTCPeerConnection localConnection;
    // Remote ICE candidates buffered until the remote description is set;
    // drained in Update().
    private List<RTCIceCandidate> iceCandidateBuffer = new List<RTCIceCandidate>();
    // NOTE(review): never read or written in this snippet -- appears unused.
    bool signalingComplete = false ;

    // Unity lifecycle: open the websocket signaling channel, initialize
    // WebRTC, and grab the camera component on this GameObject.
    private void Awake()
    {
        //connect to the WebServer through websocket
        socket = new WebSocket("ws://ar-video-streaming.herokuapp.com?sender=unity");
        // OnMessage fires on a background thread; only enqueue the handler
        // coroutine here -- Update() starts it on the main thread.
        socket.OnMessage += (o, e) => {
            threadPumpList.Add(handleIncommingMessages(o, e));
        };

        socket.Connect();
        Debug.Log(socket.ReadyState);

        // Software encoder -- presumably chosen to avoid hardware-encoder
        // availability issues; TODO confirm against target devices.
        WebRTC.WebRTC.Initialize(EncoderType.Software);
        arCamera = GetComponent<Camera>();

    }

    // Unity lifecycle hook; nothing to do here -- all setup happens in Awake().
    private void Start()
    {

    }

    // Update is called once per frame
    void Update()
    {
        // Start any handler coroutines queued by the websocket thread, so
        // every Unity/WebRTC call runs on the main thread.
        while (threadPumpList.Count > 0)
        {
            StartCoroutine(threadPumpList[0]);
            threadPumpList.RemoveAt(0);
        }

        // Apply buffered remote ICE candidates once the remote description
        // is in place. NOTE(review): reading RemoteDescription before any
        // description has been set may throw in some package versions --
        // verify against the installed com.unity.webrtc release.
        if (localConnection  != null && localConnection.RemoteDescription.sdp != null ) {
            while(iceCandidateBuffer.Count > 0)
            {
                RTCIceCandidate iceCandidate = iceCandidateBuffer[0];
                localConnection.AddIceCandidate(iceCandidate);
                iceCandidateBuffer.RemoveAt(0);
                Debug.Log("ice candidate msg is handled");
            }
        }
    }

    // Parses a signaling message and dispatches it by type. Returned as a
    // coroutine so Update() can start it on the main thread; the offer and
    // answer handlers are themselves coroutines and are yielded through here.
    public IEnumerator handleIncommingMessages(object sender, MessageEventArgs e)
    {

        Debug.Log("Got message");
        Debug.Log(e.Data);
        SignalingMessage message = JsonConvert.DeserializeObject<SignalingMessage>(e.Data);

        switch (message.type)
        {
            case "offer": yield return handleOffer(message); break;
            case "answer": yield return handleAnswer(message); break;
            case "ice-candidate": handleIceCandidateMessage(message); break;
            case "user-joined": callJoinedUser(); break;
        }
    }

    // A remote user joined: create the local peer connection and attach a
    // video track rendering the camera. AddTrack fires OnNegotiationNeeded,
    // which sends the SDP offer to the browser peer.
    private void callJoinedUser()
    {
        //Create local peer
        RTCConfiguration config = default;
        config.iceServers = new[] { new RTCIceServer { urls = new[] { "stun:stun.l.google.com:19302" } } };
        localConnection = new RTCPeerConnection(ref config);

        // NOTE(review): stray second ';' at end of this line -- harmless empty statement.
        localConnection.OnNegotiationNeeded = () => { Debug.Log("negotiation needed"); StartCoroutine(handleNegotiationNeeded()); };;
        localConnection.OnIceCandidate += handleIceCandidate;
        localConnection.OnIceConnectionChange = handleIceConnectionChange;

        RenderTexture rt;
        if (arCamera.targetTexture != null)
        {
            // Reuse the camera's existing target texture, but convert it to a
            // graphics format the WebRTC encoder can consume if necessary.
            rt = arCamera.targetTexture;
            RenderTextureFormat supportFormat = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            GraphicsFormat graphicsFormat = GraphicsFormatUtility.GetGraphicsFormat(supportFormat, RenderTextureReadWrite.Default);
            GraphicsFormat compatibleFormat = SystemInfo.GetCompatibleFormat(graphicsFormat, FormatUsage.Render);
            GraphicsFormat format = graphicsFormat == compatibleFormat ? graphicsFormat : compatibleFormat;

            if (rt.graphicsFormat != format)
            {
                Debug.LogWarning(
                    $"This color format:{rt.graphicsFormat} not support in unity.webrtc. Change to supported color format:{format}.");
                // Release before changing the format, then recreate the texture.
                rt.Release();
                rt.graphicsFormat = format;
                rt.Create();
            }

            arCamera.targetTexture = rt;
        }
        else
        {
            // No existing target texture: create one in a supported format.
            RenderTextureFormat format = WebRTC.WebRTC.GetSupportedRenderTextureFormat(SystemInfo.graphicsDeviceType);
            rt = new RenderTexture(1270, 720, 0, format)
            {
                antiAliasing = 1
            };
            rt.Create();
            arCamera.targetTexture = rt;
        }

        localConnection.AddTrack(new VideoStreamTrack(rt));
        Debug.Log("track added");

    }

    // Create an SDP offer, apply it as the local description, and send it to
    // the remote peer through the signaling server.
    private IEnumerator handleNegotiationNeeded()
    {
        var offer = localConnection.CreateOffer();
        yield return offer;

        var offerDesc = offer.Desc;
        var setLocalDesc = localConnection.SetLocalDescription(ref offerDesc);
        yield return setLocalDesc;

        socket.Send(JsonConvert.SerializeObject(new SignalingMessage
        {
            type = "offer",
            sessionDescription = offerDesc
        }));
    }

    // Forward a locally gathered ICE candidate to the remote peer via the
    // signaling server; an empty candidate marks end-of-candidates and is
    // intentionally not sent.
    private void handleIceCandidate(RTCIceCandidate candidate)
    {
        if (string.IsNullOrEmpty(candidate.Candidate))
        {
            return;
        }

        var outgoing = new SignalingMessage
        {
            type = "ice-candidate",
            outcommingIceCandidate = candidate
        };
        socket.Send(JsonConvert.SerializeObject(outgoing));
    }
    // Trace ICE connection state transitions for debugging.
    private void handleIceConnectionChange(RTCIceConnectionState state)
    {
        Debug.Log($"IceCandidate state is {state}");
    }

    // Apply a remote offer as the remote description, then create, apply,
    // and send back an SDP answer through the signaling server.
    public IEnumerator handleOffer(SignalingMessage offerMessage)
    {

        var setRemoteDesc = localConnection.SetRemoteDescription(ref offerMessage.sessionDescription);
        yield return setRemoteDesc;

        var answer = localConnection.CreateAnswer();
        yield return answer;

        var answerDesc = answer.Desc;
        var setLocalDesc = localConnection.SetLocalDescription(ref answerDesc);
        yield return setLocalDesc;

        SignalingMessage answerMessage = new SignalingMessage
        {
            type = "answer",
            sessionDescription = answerDesc
        };

        socket.Send(JsonConvert.SerializeObject(answerMessage));

        Debug.Log("offer is handled");
    }

    // Apply the remote peer's answer to complete signaling; buffered ICE
    // candidates are then flushed by Update().
    public IEnumerator handleAnswer(SignalingMessage answerMessage)
    {
        var setRemoteDesc = localConnection.SetRemoteDescription(ref answerMessage.sessionDescription);
        yield return setRemoteDesc;
        Debug.Log("Answer is handled");

    }

    // Buffer an incoming remote ICE candidate; Update() applies it to the
    // peer connection once the remote description has been set.
    public void handleIceCandidateMessage(SignalingMessage candidateMessage)
    {
        var candidate = new RTCIceCandidate(candidateMessage.incommingIceCandidate);
        iceCandidateBuffer.Add(candidate);
    }

}

}`

The react.js code for the browser client:

`import React,{useEffect,useRef} from "react";

export default function Signaling (props) {

const localConnection = useRef() ;
const socket = useRef() ;
const localPeerVideo = useRef() ;
const remotePeerVideo = useRef() ;
const iceCandidateBuffer = useRef() ;

useEffect(()=>{

    socket.current = new WebSocket('ws://ar-video-streaming.herokuapp.com?sender=browser');

    socket.current.onmessage = (message)=>{
        const jsonData = JSON.parse(message.data) ;

        switch(jsonData.type) {
            case 'offer': handleOffer(jsonData);
                break;
            case 'answer': handleAnswer(jsonData) ;
                break; 
            case 'ice-candidate': handleIceCandidateMessage(jsonData) ;
                break;
            case 'error': handleErrorMessage(jsonData) ;break ;
            default: console.log("Invalid Message Type");
        } 
    }
    socket.current.onclose = ()=> {console.log("socket closed ")} ;

    navigator.mediaDevices.getUserMedia({video: true}).then(stream => {
        localPeerVideo.current.srcObject = stream;
        console.log(localPeerVideo)  ;
    });

    iceCandidateBuffer.current = []; 
})

useEffect(() => {
    if(localConnection.current && localConnection.current.remoteDescription){
        while(iceCandidateBuffer.current.length > 0) {
            const candidate = iceCandidateBuffer.current.pop();
            localConnection.current.addIceCandidate(candidate);
        }
    }
},[iceCandidateBuffer])

async function handleNegotiationNeeded () {
    const offer = await createOffer() ;
    socket.current.send(JSON.stringify({
        type:'offer',
        sessionDescription:offer,
    }));
}

function handleIceCandidateEvent (e) { 
    if(e.candidate) {
        socket.current.send(JSON.stringify({
            type: "ice-candidate",
            sessionDescription:{} ,
            incommingIceCandidate: e.candidate
        }));
    }
}

function handleTrack (e) {
    console.log("Recieved a Track");
    console.log(e) ;
    remotePeerVideo.current.srcObject = e.streams[0];
}

function handleIceCandidateMessage (candidateData) {
    console.log("incomming ice candidate") ;
    console.log(localConnection.current.currentRemoteDescription) ;
    const candidate = new RTCIceCandidate(decapitalize(candidateData.outcommingIceCandidate));
    iceCandidateBuffer.current.push(candidate);
}

async function createOffer () {
    const offer = await localConnection.current.createOffer();
    await localConnection.current.setLocalDescription(offer);
    return offer;
}

async function handleOffer (offerData) {
    console.log(offerData) ;
    const desc = await new RTCSessionDescription({...offerData.sessionDescription,type:"offer"}) ;    
    await localConnection.current.setRemoteDescription(desc) ;
    const answer = await localConnection.current.createAnswer() ;
    await localConnection.current.setLocalDescription(answer) ;
    socket.current.send(JSON.stringify({
        type:"answer",
        sessionDescription:answer
    }))

    console.log("answer is sent") ;
}

async function handleAnswer (answerData) {
    const desc = await new RTCSessionDescription ({...answerData.sessionDescription,type:"answer"}) ;
    await localConnection.current.setRemoteDescription(desc) ;
}

function handleErrorMessage (message) {
    alert(message.message) ;
}
/*function handleDataChannelStateChange () {
    console.log("data channel is open " , dataChannel.current.readyState) ;
}

function handleDataChannelMessage (data) {
    console.log(data) ;
}

function sendDataChannelMessage () {
    dataChannel.current.send("Message from browser") ;
}*/

async function handleClick () {

    //create a local peer to represent this client
    const configuration = {'iceServers': [{'urls': 'stun:stun.l.google.com:19302'}]};
    localConnection.current = new RTCPeerConnection(configuration);
    localConnection.current.onicecandidate = handleIceCandidateEvent ;
    localConnection.current.onnegotiationneeded = handleNegotiationNeeded ;
    localConnection.current.ontrack = handleTrack ;

   socket.current.send(JSON.stringify({
       type:"user-joined"
   })) 
   /*dataChannel.current = localConnection.current.createDataChannel("data channel");
   dataChannel.current.onopen = handleDataChannelStateChange ;
   dataChannel.current.onmessage = handleDataChannelMessage ;

    <button onClick = {() => sendDataChannelMessage() } >
            Send Message        
        </button>  

   */
}

return (
    <>
    <div>
        <button onClick={()=> handleClick()}>
            Call Unity
        </button>
    </div>

    <br/>

    <div style={{backgroundColor:"green"}}>
        <video  style ={{width:"500px",backgroundColor:"blue"}} autoPlay={true} ref={localPeerVideo} />
        <video  style ={{width:"500px",backgroundColor:"blue"}} autoPlay={true} ref={remotePeerVideo} />

    </div>
    </>
);

}

function capitalize (jsonObject) { let result = {}; for(var key in jsonObject){ result[key.charAt(0).toUpperCase() + key.slice(1)] = jsonObject[key] ; } return result; }

function decapitalize(jsonObject) { let result = {} ; for(var key in jsonObject){ result[key.charAt(0).toLowerCase() + key.slice(1)] = jsonObject[key] ; } return result;
} `

I have followed the documentation for 3 weeks now and I am not getting any results yet. Here's the signaling flow:

  1. The browser client clicks on a button which sends a message to the unity client through the websocket
  2. The unity client fires handleIncommingMessages method which will execute the callJoinedUser method
  3. callJoinedUser creates a VideoStreamTrack and calls the addTrack method to send the captured stream
  4. Once the addTrack is called the onNegotiationNeeded event is fired and so is handler handleNegotiationNeeded which sends an offer to the browser peer
  5. The browser gets the offer and set its remote description and then generates the answer and send it back to the unity client.
  6. The unity client receives the answer and set its remote description

I have tried two ways for ice candidate exchange:

The first is to call the addIceCandidate method as soon as a candidate is received from the other peer. The second is to queue the received IceCandidates until the remote description is set. Unfortunately, neither way works: I still receive a MediaStream from the Unity client, but it does not play. Any ideas what might be wrong?