awslabs / amazon-kinesis-video-streams-webrtc-sdk-js

JS SDK for interfacing with the Amazon Kinesis Video Streams Signaling Service.
https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-js/examples/index.html
Apache License 2.0

MediaStream object not working in React JS #45

Closed · devb closed this issue 4 years ago

devb commented 4 years ago

Hi guys. When I access the stream using your sample code, it works fine on the example URL you've provided, and it also works when we clone it to our local machine and run it there.

But when we create a React component using the same code you've provided and try to view the master stream as a viewer, nothing plays in the video player. All the logs match your example URL, and we even receive the remote track event with a MediaStream object, but when that stream is assigned to video.srcObject, it doesn't play.

Below is the component I've created

Filename: livefeed.component.js

import React from 'react';
import AWS from "aws-sdk";
import ReactPlayer from 'react-player'

const viewer = {};

class LiveFeedView extends React.Component {
    constructor(props) {
        super(props);
        this.videoRef = React.createRef()
    }

    componentWillUnmount() {
        console.log('[VIEWER] Stopping viewer connection');
        if (viewer.signalingClient) {
            viewer.signalingClient.close();
            viewer.signalingClient = null;
        }

        if (viewer.peerConnection) {
            viewer.peerConnection.close();
            viewer.peerConnection = null;
        }

        if (viewer.localStream) {
            viewer.localStream.getTracks().forEach(track => track.stop());
            viewer.localStream = null;
        }

        if (viewer.remoteStream) {
            viewer.remoteStream.getTracks().forEach(track => track.stop());
            viewer.remoteStream = null;
        }

        if (viewer.peerConnectionStatsInterval) {
            clearInterval(viewer.peerConnectionStatsInterval);
            viewer.peerConnectionStatsInterval = null;
        }

        if (viewer.localView) {
            viewer.localView.srcObject = null;
        }

        if (viewer.remoteView) {
            viewer.remoteView.srcObject = null;
        }

        if (viewer.dataChannel) {
            viewer.dataChannel = null;
        }
    }

    async componentDidMount() {
        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this.props.formValues.region,
            accessKeyId: this.props.formValues.accessKeyId,
            secretAccessKey: this.props.formValues.secretAccessKey,
            sessionToken: this.props.formValues.sessionToken,
            endpoint: this.props.formValues.endpoint,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient.describeSignalingChannel({ ChannelName: this.props.formValues.channelName}).promise();
        const channelARN = describeSignalingChannelResponse.ChannelInfo.ChannelARN;
        console.log('[VIEWER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient.getSignalingChannelEndpoint({ ChannelARN: channelARN,
                SingleMasterChannelEndpointConfiguration: {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: window.KVSWebRTC.Role.VIEWER,
                },
            }).promise();

        const endpointsByProtocol = getSignalingChannelEndpointResponse.ResourceEndpointList.reduce((endpoints, endpoint) => {
            endpoints[endpoint.Protocol] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        console.log('[VIEWER] Endpoints: ', endpointsByProtocol);

        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this.props.formValues.region,
            accessKeyId: this.props.formValues.accessKeyId,
            secretAccessKey: this.props.formValues.secretAccessKey,
            sessionToken: this.props.formValues.sessionToken,
            endpoint: endpointsByProtocol.HTTPS,
        });

        // Get ICE server configuration
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient.getIceServerConfig({
                ChannelARN: channelARN,
            }).promise();

        const iceServers = [];
        iceServers.push({ urls: `stun:stun.kinesisvideo.${this.props.formValues.region}.amazonaws.com:443` });
        //if (!formValues.natTraversalDisabled) {
            getIceServerConfigResponse.IceServerList.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        //}
        console.log('[VIEWER] ICE servers: ', iceServers);

        // Create Signaling Client
        viewer.signalingClient = new window.KVSWebRTC.SignalingClient({
            channelARN,
            channelEndpoint: endpointsByProtocol.WSS,
            clientId: 'Hello2223',
            role: window.KVSWebRTC.Role.VIEWER,
            region: this.props.formValues.region,
            credentials: {
                accessKeyId: this.props.formValues.accessKeyId,
                secretAccessKey: this.props.formValues.secretAccessKey,
                sessionToken: this.props.formValues.sessionToken,
            },
        });

        const resolution = { width: { ideal: 1280 }, height: { ideal: 720 }};
        const constraints = {
            video: false,
            audio: true,
        };
        const configuration = {
            iceServers,
            iceTransportPolicy: 'all',
        };
        viewer.peerConnection = new RTCPeerConnection(configuration);

        viewer.signalingClient.on('open', async () => {
            console.log('[VIEWER] Connected to signaling service');

            // Get a stream from the webcam, add it to the peer connection, and display it in the local view
            try {
                viewer.localStream = await navigator.mediaDevices.getUserMedia(constraints);
                viewer.localStream.getTracks().forEach(track => viewer.peerConnection.addTrack(track, viewer.localStream));
                //localView.srcObject = viewer.localStream;
            } catch (e) {
                console.error('[VIEWER] Could not find webcam');
                return;
            }

            // Create an SDP offer to send to the master
            console.log('[VIEWER] Creating SDP offer');
            await viewer.peerConnection.setLocalDescription(
                await viewer.peerConnection.createOffer({
                    offerToReceiveAudio: true,
                    offerToReceiveVideo: true,
                }),
            );

            // When trickle ICE is enabled, send the offer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
            //if (formValues.useTrickleICE) {
                console.log('[VIEWER] Sending SDP offer');
                viewer.signalingClient.sendSdpOffer(viewer.peerConnection.localDescription);
            //}
            console.log('[VIEWER] Generating ICE candidates');
        });

        viewer.signalingClient.on('sdpAnswer', async answer => {
            // Add the SDP answer to the peer connection
            console.log('[VIEWER] Received SDP answer');
            await viewer.peerConnection.setRemoteDescription(answer);
        });

        viewer.signalingClient.on('iceCandidate', candidate => {
            // Add the ICE candidate received from the MASTER to the peer connection
            console.log('[VIEWER] Received ICE candidate');
            viewer.peerConnection.addIceCandidate(candidate);
        });

        viewer.signalingClient.on('close', () => {
            console.log('[VIEWER] Disconnected from signaling channel');
        });

        viewer.signalingClient.on('error', error => {
            console.error('[VIEWER] Signaling client error: ', error);
        });

        // Send any ICE candidates to the other peer
        viewer.peerConnection.addEventListener('icecandidate', ({ candidate }) => {
            if (candidate) {
                console.log('[VIEWER] Generated ICE candidate');

                // When trickle ICE is enabled, send the ICE candidates as they are generated.
                if (this.props.formValues.useTrickleICE) {
                    console.log('[VIEWER] Sending ICE candidate');
                    viewer.signalingClient.sendIceCandidate(candidate);
                }
            } else {
                console.log('[VIEWER] All ICE candidates have been generated');

                // When trickle ICE is disabled, send the offer now that all the ICE candidates have been generated.
                if (!this.props.formValues.useTrickleICE) {
                    console.log('[VIEWER] Sending SDP offer');
                    viewer.signalingClient.sendSdpOffer(viewer.peerConnection.localDescription);
                }
            }
        });

        // As remote tracks are received, add them to the remote view
        viewer.peerConnection.addEventListener('track', async (event) => {
            console.log('[VIEWER] Received remote track');
            // if (remoteView.srcObject) {
            //     return;
            // }
            viewer.remoteStream = event.streams[0];
            //this.setState({streamURL: event.streams[0]}); 
            this.videoRef.current.srcObject = event.streams[0];
        });

        console.log('[VIEWER] Starting viewer connection');
        viewer.signalingClient.open();
    }

    render() {
        return (
            <video ref={this.videoRef} style={{width: '100%', minHeight: '500px', maxHeight: '100px', position: 'relative' }} autoPlay playsInline />
        )
    }
}

export default LiveFeedView;

And I'm using the above component like this on my screen in React JS.

Filename: singledevice.screen.js

import React, { Component } from 'react';
import Amplify, { Auth, Hub } from 'aws-amplify';
import AWS from "aws-sdk";
import { Link } from 'react-router-dom';

import LiveFeedView from 'components/liveFeed';

class SingleDevicePage extends Component {
    constructor(props){
        super(props);
        this.state = {streamOptions: null, token: '', user: {}, device: {}};
    }

    async componentDidMount() {
        await Auth.currentSession().then((data) => {
            this.setState({token: data.getIdToken().getJwtToken()}, () => {
                this.loadStream(data.getIdToken().getJwtToken());
            });
        }).catch((err) => {
            console.error(err);
        });
    }

    loadStream = (token) => {

        API.userCredentials(token).then((response) => {
            if(response.data.status) {

                var options = {
                    accessKeyId: response.data.data.accessKeyId,
                    secretAccessKey: response.data.data.secretAccessKey,
                    sessionToken: response.data.data.sessionToken,
                    region: 'ap-south-1',
                    channelName: this.props.match.params.ref
                }
                console.log(options);

                this.setState({user: response.data.data, streamOptions: options});
            }
        });
    }

    render() {
        return(
            <div className="wrapper">
                <LogoNavBar />
                <NotificationBar />
                <div className="main no-sidebar">
                    <div className="camera-single" style={{backgroundImage: 'url('+require('../assets/images/card-bigbg.jpg')+')'}}>
                        {this.state.streamOptions !== null ? <LiveFeedView formValues={this.state.streamOptions} /> : null }
                    </div>
                </div>
            </div>
        )
    }
}

export default SingleDevicePage;

Please help me out with this, and do check my code in case I'm making any mistake. Thanks in advance.

lherman-cs commented 4 years ago

I'm not really sure where the problem is. I tried your snippet, just changed the credentials, and it seems to work fine for me.

demo

Can you still reproduce it? If so, would you mind sharing the steps?

Also, could you please check with the browser's developer tools whether you're actually receiving data on your video track?
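
For example, something along these lines (a minimal sketch against the `viewer.peerConnection` from your snippet) will log whether inbound bytes are actually flowing:

// Minimal sketch: poll the WebRTC stats to confirm media is arriving.
// Assumes viewer.peerConnection is the RTCPeerConnection from the snippet above.
viewer.peerConnectionStatsInterval = setInterval(async () => {
    const stats = await viewer.peerConnection.getStats();
    stats.forEach(report => {
        // 'inbound-rtp' entries describe media received from the remote peer
        if (report.type === 'inbound-rtp') {
            // bytesReceived should keep increasing if the track is flowing
            console.log(`[VIEWER] inbound ${report.kind || report.mediaType}:`, report.bytesReceived, 'bytes');
        }
    });
}, 1000);

(Assigning the interval to viewer.peerConnectionStatsInterval lets your existing componentWillUnmount cleanup clear it.)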

devb commented 4 years ago

Hello Lukas @lherman-cs, first of all, thank you for your prompt response. The video you've shared is from the example URL only, and you're only seeing yourself. I am trying to see the video stream from the master channel (the return channel shown on the right in your video).

We've got WiFi camera devices whose live stream output we want to integrate into our web app using KVS. Right now we don't have the actual devices due to the pandemic, so we've created a device simulator using our laptop's webcam, which does almost the same thing: it publishes its stream to KVS as master, and we want to view that stream in our web application as a viewer.

Please let me know if there's any further clarification required, and please do the needful.

lherman-cs commented 4 years ago

@devb please correct me if I'm wrong; the following is your setup: a laptop webcam acting as a device simulator streams to KVS as master, and your React web app connects to the same channel as a viewer.

And the problem is that the video is not showing in the web app, right?

devb commented 4 years ago

Yes @lherman-cs. But that camera (on the laptop) is temporary until we get the actual device. And yes: when we try to view that master stream using your example URL, it works, but the same doesn't work when we connect from our React JS app.

If we compare the logs generated by our app, they are the same as the example URL's logs. The only difference is that the stream doesn't show up in the video player.

lherman-cs commented 4 years ago

I'm sorry, but I'm still not really sure what the problem is. However, the following things from your snippets are worth mentioning:

  1. The constraints have video disabled. Since it is disabled, the master will not be able to see the video stream from the viewer.
        const constraints = {
            video: false, // this one
            audio: true,
        };
  2. You don't have a video tag for showing the local video stream. That's not strictly necessary, but it means the video stream you're seeing in the web app is the one you receive from the master.

  3. I noticed that you're using trickle ICE by default. In this case, you should not send another SDP offer once ICE gathering is done; otherwise you'll get an error about setting the remote description in the wrong state. The following is specifically what I mean:

        viewer.peerConnection.addEventListener('icecandidate', ({ candidate }) => {
            if (candidate) {
                console.log('[VIEWER] Generated ICE candidate');

                // When trickle ICE is enabled, send the ICE candidates as they are generated.
                if (this.props.formValues.useTrickleICE) { // <-- You need to comment this out so we'll always send out ice candidates to the other peer whenever we receive them from STUN
                    console.log('[VIEWER] Sending ICE candidate');
                    viewer.signalingClient.sendIceCandidate(candidate);
                }
            } else {
                console.log('[VIEWER] All ICE candidates have been generated');

                // When trickle ICE is disabled, send the offer now that all the ICE candidates have been generated.
                if (!this.props.formValues.useTrickleICE) { // <-- this whole block needs to be removed
                    console.log('[VIEWER] Sending SDP offer');
                    viewer.signalingClient.sendSdpOffer(viewer.peerConnection.localDescription);
                }
            }
        });

After I enabled video in the constraints and added a video tag for showing the local video stream, the web app works almost identically to the example. Please see the demo and a short sketch of those two changes below:

demo2
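
For reference, the two changes boil down to something like this (a sketch against your snippet; `localVideoRef` is an assumed second ref, created the same way as `videoRef`):

// 1. Enable video in the constraints so the master can also see the viewer's stream:
const constraints = {
    video: true,
    audio: true,
};

// 2. Inside the signaling client's 'open' handler, after getUserMedia resolves,
//    attach the local stream to a second <video> element (localVideoRef is hypothetical):
viewer.localStream = await navigator.mediaDevices.getUserMedia(constraints);
viewer.localStream.getTracks().forEach(track => viewer.peerConnection.addTrack(track, viewer.localStream));
this.localVideoRef.current.srcObject = viewer.localStream;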

devb commented 4 years ago

Hello @lherman-cs, I guess you've understood already, but the thing is, it's not one-to-one communication or video/audio sharing. I only want to see the master's video/audio stream without sending any stream from my local audio/video, which is why the constraints are set the way you've marked. As a viewer, I only want to see the return channel, i.e. the output from the master, without publishing any of my own audio/video.

When I try to access the MASTER channel from a computer that has no microphone or webcam, it doesn't work, not even on your example URL.

I'll give the SDP offer code you've suggested a try and get back to you.

Thanks

lherman-cs commented 4 years ago

I see. I understand your use case now. Sorry it took a while šŸ˜ƒ.

For your use case, I would not call getUserMedia at all, since you don't need access to a microphone or a webcam. Otherwise, as you've mentioned, it'll cause an error when the user doesn't have either of them.

Also, another thing I noticed from your snippet: your client id is fixed to 'Hello2223'. If this is static, you can only have one viewer, because our signaling server will fail to differentiate the viewers.

The following is a working example for your use case:

import React from 'react';
import AWS from "aws-sdk";

const viewer = {};

function uid() {
  return Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
}

class LiveFeedView extends React.Component {
    constructor(props) {
        super(props);
        this.videoRef = React.createRef()
    }

    componentWillUnmount() {
        console.log('[VIEWER] Stopping viewer connection');
        if (viewer.signalingClient) {
            viewer.signalingClient.close();
            viewer.signalingClient = null;
        }

        if (viewer.peerConnection) {
            viewer.peerConnection.close();
            viewer.peerConnection = null;
        }

        if (viewer.remoteStream) {
            viewer.remoteStream.getTracks().forEach(track => track.stop());
            viewer.remoteStream = null;
        }

        if (viewer.peerConnectionStatsInterval) {
            clearInterval(viewer.peerConnectionStatsInterval);
            viewer.peerConnectionStatsInterval = null;
        }

        if (viewer.remoteView) {
            viewer.remoteView.srcObject = null;
        }

        if (viewer.dataChannel) {
            viewer.dataChannel = null;
        }
    }

    async componentDidMount() {
        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this.props.formValues.region,
            accessKeyId: this.props.formValues.accessKeyId,
            secretAccessKey: this.props.formValues.secretAccessKey,
            sessionToken: this.props.formValues.sessionToken,
            endpoint: this.props.formValues.endpoint,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient.describeSignalingChannel({ ChannelName: this.props.formValues.channelName}).promise();
        const channelARN = describeSignalingChannelResponse.ChannelInfo.ChannelARN;
        console.log('[VIEWER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient.getSignalingChannelEndpoint({ ChannelARN: channelARN,
                SingleMasterChannelEndpointConfiguration: {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: window.KVSWebRTC.Role.VIEWER,
                },
            }).promise();

        const endpointsByProtocol = getSignalingChannelEndpointResponse.ResourceEndpointList.reduce((endpoints, endpoint) => {
            endpoints[endpoint.Protocol] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        console.log('[VIEWER] Endpoints: ', endpointsByProtocol);

        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this.props.formValues.region,
            accessKeyId: this.props.formValues.accessKeyId,
            secretAccessKey: this.props.formValues.secretAccessKey,
            sessionToken: this.props.formValues.sessionToken,
            endpoint: endpointsByProtocol.HTTPS,
        });

        // Get ICE server configuration
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient.getIceServerConfig({
                ChannelARN: channelARN,
            }).promise();

        const iceServers = [];
        iceServers.push({ urls: `stun:stun.kinesisvideo.${this.props.formValues.region}.amazonaws.com:443` });
        //if (!formValues.natTraversalDisabled) {
            getIceServerConfigResponse.IceServerList.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        //}
        console.log('[VIEWER] ICE servers: ', iceServers);

        // Create Signaling Client
        viewer.signalingClient = new window.KVSWebRTC.SignalingClient({
            channelARN,
            channelEndpoint: endpointsByProtocol.WSS,
            clientId: uid(),
            role: window.KVSWebRTC.Role.VIEWER,
            region: this.props.formValues.region,
            credentials: {
                accessKeyId: this.props.formValues.accessKeyId,
                secretAccessKey: this.props.formValues.secretAccessKey,
            },
        });

        const configuration = {
            iceServers,
            iceTransportPolicy: 'all',
        };
        viewer.peerConnection = new RTCPeerConnection(configuration);

        viewer.signalingClient.on('open', async () => {
            console.log('[VIEWER] Connected to signaling service');

            // Create an SDP offer to send to the master
            console.log('[VIEWER] Creating SDP offer');
            await viewer.peerConnection.setLocalDescription(
                await viewer.peerConnection.createOffer({
                    offerToReceiveAudio: true,
                    offerToReceiveVideo: true,
                }),
            );

            // Trickle ICE: send the offer now and then send ICE candidates as they are generated.
            console.log('[VIEWER] Sending SDP offer');
            viewer.signalingClient.sendSdpOffer(viewer.peerConnection.localDescription);
            console.log('[VIEWER] Generating ICE candidates');
        });

        viewer.signalingClient.on('sdpAnswer', async answer => {
            // Add the SDP answer to the peer connection
            console.log('[VIEWER] Received SDP answer');
            await viewer.peerConnection.setRemoteDescription(answer);
        });

        viewer.signalingClient.on('iceCandidate', candidate => {
            // Add the ICE candidate received from the MASTER to the peer connection
            console.log('[VIEWER] Received ICE candidate');
            viewer.peerConnection.addIceCandidate(candidate);
        });

        viewer.signalingClient.on('close', () => {
            console.log('[VIEWER] Disconnected from signaling channel');
        });

        viewer.signalingClient.on('error', error => {
            console.error('[VIEWER] Signaling client error: ', error);
        });

        // Send any ICE candidates to the other peer
        viewer.peerConnection.addEventListener('icecandidate', ({ candidate }) => {
            if (candidate) {
                console.log('[VIEWER] Generated ICE candidate');

                // Trickle ICE: send the ICE candidates as they are generated.
                console.log('[VIEWER] Sending ICE candidate');
                viewer.signalingClient.sendIceCandidate(candidate);
            } else {
                console.log('[VIEWER] All ICE candidates have been generated');
            }
        });

        // As remote tracks are received, add them to the remote view
        viewer.peerConnection.addEventListener('track', async (event) => {
            console.log('[VIEWER] Received remote track');
            // if (remoteView.srcObject) {
            //     return;
            // }
            viewer.remoteStream = event.streams[0];
            //this.setState({streamURL: event.streams[0]}); 
            this.videoRef.current.srcObject = event.streams[0];
        });

        console.log('[VIEWER] Starting viewer connection');
        viewer.signalingClient.open();
    }

    render() {
        return (
          <video ref={this.videoRef} style={{width: '100%', minHeight: '500px', maxHeight: '100px', position: 'relative' }} autoPlay playsInline />
        )
    }
}

function App() {
  const opts = {
    accessKeyId: '<your access key id>',
    secretAccessKey: '<your secret key>',
    region: '<region>',
    channelName: '<your channel>'
  };
  return (
    <div className="App">
      <LiveFeedView formValues={opts}></LiveFeedView>
    </div>
  );
}

export default App;

devb commented 4 years ago

Thanks a lot Lukas. It worked!!

lherman-cs commented 4 years ago

Awesome. In that case, I'll close this ticket. Please feel free to reopen it if you have another issue related to this.

franco-roura commented 3 years ago

Hello, I have a few questions regarding the suggested approach above. Does AWS recommend embedding the accessKeyId and secretAccessKey in the user-facing code of a React application?

I have a use case where I'd like to let users use AWS Kinesis to create streams on a web app, a concept similar to twitch.tv, but we don't want to let users update arbitrary objects in the streams' bucket, only create new ones.

What is the approach AWS recommends for this? Maybe Cognito with user federation, along the lines of the sketch below? I'm a bit lost on this topic.
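
To make the question concrete, the kind of thing I have in mind is roughly this (a sketch assuming an existing Cognito User Pool federated into an Identity Pool; all identifiers are placeholders):

import AWS from 'aws-sdk';

// Placeholder identifiers; assumes a Cognito Identity Pool federated with a User Pool.
AWS.config.region = '<region>';
AWS.config.credentials = new AWS.CognitoIdentityCredentials({
    IdentityPoolId: '<identity pool id>',
    Logins: {
        // Key format: 'cognito-idp.<region>.amazonaws.com/<user pool id>'
        '<user pool provider name>': idToken, // JWT of the signed-in user
    },
});

// refresh() exchanges the user's token for temporary, IAM-scoped credentials,
// so no long-lived keys are shipped in the client bundle.
AWS.config.credentials.refresh(err => {
    if (err) {
        console.error(err);
        return;
    }
    // AWS.config.credentials now holds a temporary accessKeyId / secretAccessKey /
    // sessionToken that could be passed as formValues to the viewer component above.
});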