awslabs / amazon-kinesis-video-streams-webrtc-sdk-js

JS SDK for interfacing with the Amazon Kinesis Video Streams Signaling Service.
https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-js/examples/index.html
Apache License 2.0
286 stars 141 forks source link

Why does createDataChannel from master fail and createDataChannel from viewer succeed? #199

Closed — helloworld098765 closed this issue 1 year ago

helloworld098765 commented 1 year ago

Why does createDataChannel from master fail and createDataChannel from viewer succeed?

Node.js v16.17.1

1-->createDataChannel from master (Fail)

master

import * as KVSWebRTC from 'amazon-kinesis-video-streams-webrtc';
import * as AWS from 'aws-sdk';
import * as WRTC from 'wrtc';

// State shared by one master session: the signaling client plus per-viewer
// peer connections and data channels, keyed by the remote client id.
export interface IMaster {
    signalingClient: KVSWebRTC.SignalingClient | null;
    // Map of remoteClientId -> RTCPeerConnection (one per connected viewer).
    peerConnectionByClientId: any;
    // Map of remoteClientId -> RTCDataChannel (only populated when the data channel is enabled).
    dataChannelByClientId: any;
    // Local MediaStream from getUserMedia, or null when capture was not requested or failed.
    localStream: any;
    remoteStreams: any;
    // Handle returned by setInterval for the stats poller; null until the first offer arrives.
    peerConnectionStatsInterval: any;
}
/**
 * KVS WebRTC "master" side: connects to the Kinesis Video Streams signaling
 * channel, answers SDP offers from viewers, and (optionally) opens a data
 * channel for each viewer that connects.
 *
 * NOTE(review): credentials, region and channel name are blank placeholders —
 * they must be filled in before this class can run.
 */
export class KvsManager {

    // AWS credentials / channel configuration (intentionally blank placeholders).
    private readonly _accessKeyId = '';

    private readonly _secretAccessKey = '';

    private readonly _region = '';

    private readonly _channelName = '';

    private readonly _sessionToken = undefined;

    // true disables STUN and TURN entirely (host candidates only).
    private readonly _natTraversalDisabled = false;

    // true restricts ICE to TURN relay candidates (iceTransportPolicy: 'relay').
    private readonly _forceTURN = false;

    // true sends ICE candidates as they are gathered; false waits for gathering to finish.
    private readonly _useTrickleICE = true;

    // true requests 1280x720 video; false requests 640x480.
    private readonly _widescreen = true;

    private readonly _sendVideo = true;

    private readonly _sendAudio = false;

    // true creates an RTCDataChannel for every viewer that sends an offer.
    private readonly _openDataChannel = true;

    /**
     * Connects to the signaling channel as MASTER and services viewer offers.
     * The trailing sleep keeps the Node.js event loop alive so the signaling
     * connection and peer connections stay open.
     */
    public async start(): Promise<void> {

        // Per-session state; the per-viewer maps are keyed by remote client id.
        const master: IMaster = {
            signalingClient: null,
            peerConnectionByClientId: {},
            dataChannelByClientId: {},
            localStream: null,
            remoteStreams: [],
            peerConnectionStatsInterval: null,
        };

        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: undefined,
            correctClockSkew: true,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient
            .describeSignalingChannel({
                ChannelName: this._channelName,
            }).promise();
        if (describeSignalingChannelResponse === undefined) {
            throw new Error('describeSignalingChannelResponse === undefined');
        }
        const channelARN = describeSignalingChannelResponse.ChannelInfo!.ChannelARN!;
        console.log('[MASTER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient
            .getSignalingChannelEndpoint({
                ChannelARN: channelARN,
                SingleMasterChannelEndpointConfiguration:
                {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: KVSWebRTC.Role.MASTER
                }
            }).promise();

        // Index the returned endpoints by protocol ('WSS' for signaling, 'HTTPS' for ICE config).
        const endpointsByProtocol = getSignalingChannelEndpointResponse
            .ResourceEndpointList!.reduce((endpoints, endpoint) => {
            const index = endpoint.Protocol === undefined ? '' : endpoint.Protocol;
            (endpoints as any)[index] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        console.log('[MASTER] Endpoints: ', endpointsByProtocol);
        // Create Signaling Client
        master.signalingClient = new KVSWebRTC.SignalingClient({
            channelARN,
            channelEndpoint: (endpointsByProtocol as any)['WSS'] ?? '',
            role: KVSWebRTC.Role.MASTER,
            region: this._region,
            credentials: {
                accessKeyId: this._accessKeyId,
                secretAccessKey: this._secretAccessKey,
                sessionToken: this._sessionToken,
            },
            systemClockOffset: kinesisVideoClient.config.systemClockOffset,
        });

        // Get ICE server configuration
        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: (endpointsByProtocol as any)['HTTPS'],
            correctClockSkew: true,
        });
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
            .getIceServerConfig({
                ChannelARN: channelARN,
            }).promise();
        // Typed any[] for consistency with the viewer implementation.
        const iceServers: any[] = [];

        // STUN only makes sense when not forced to relay-only.
        if (!this._natTraversalDisabled && !this._forceTURN) {
            iceServers.push({ urls: `stun:stun.kinesisvideo.${this._region}.amazonaws.com:443` });
        }
        if (!this._natTraversalDisabled) {
            // eslint-disable-next-line @reolink/general/disable-for-each-method
            getIceServerConfigResponse.IceServerList!.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        }
        console.log('[MASTER] ICE servers: ', iceServers);

        const configuration = {
            iceServers,
            iceTransportPolicy: this._forceTURN ? 'relay' : 'all',
        };

        const resolution = this._widescreen ?
            { width: { ideal: 1280 }, height: { ideal: 720 } } : { width: { ideal: 640 }, height: { ideal: 480 } };
        const constraints = {
            video: this._sendVideo ? resolution : false,
            audio: this._sendAudio,
        };

        // Get a stream from the webcam and display it in the local view.
        // If no video/audio needed, no need to request for the sources.
        // Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
        if (this._sendVideo || this._sendAudio) {
            try {
                master.localStream = await navigator.mediaDevices.getUserMedia(constraints);
                // localView.srcObject = master.localStream;
            }
            catch (e) {
                console.error('[MASTER] Could not find webcam');
                console.log(JSON.stringify(e));
            }
        }

        // eslint-disable-next-line @typescript-eslint/no-misused-promises, @typescript-eslint/require-await
        master.signalingClient.on('open', () => {
            console.log('[MASTER] Connected to signaling service');
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        master.signalingClient.on('sdpOffer', async (offer: WRTC.RTCSessionDescriptionInit, remoteClientId: string) => {
            console.log('[MASTER] Received SDP offer from client: ' + remoteClientId);
            // Create a new peer connection using the offer from the given client
            const peerConnection = new WRTC.RTCPeerConnection({
                iceTransportPolicy: configuration.iceTransportPolicy as WRTC.RTCIceTransportPolicy,
                iceServers: configuration.iceServers
            });
            master.peerConnectionByClientId[remoteClientId] = peerConnection;
            if (this._openDataChannel) {
                // NOTE(review): this channel is created on the ANSWERER after the offer
                // has already arrived. If the viewer's offer contains no data
                // ("application") m-line, a channel created here cannot be negotiated in
                // the answer and will never open — the likely reason master-side
                // createDataChannel appears to fail while viewer-side createDataChannel
                // works. Confirm against the viewer's offer SDP; otherwise renegotiation
                // would be required.
                master.dataChannelByClientId[remoteClientId] = peerConnection.createDataChannel('hello-data');
                (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onerror = (e) => {
                    console.log('error--->' + JSON.stringify(e));
                };
                (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onopen = (e) => {
                    console.log('open--->' + JSON.stringify(e));
                };
                (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onclose = (e) => {
                    console.log('close--->' + JSON.stringify(e));
                };
                (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onclosing = (e) => {
                    console.log('closing--->' + JSON.stringify(e));
                };
                (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onmessage = (e) => {
                    console.log('message--->' + JSON.stringify(e));
                };
                // peerConnection.ondatachannel = (event) => {
                //     const channel = event.channel;
                //     event.channel.onopen = (event) => {
                //         console.log('onopen-event-->' + JSON.stringify(event));
                //         console.log('readyState-->' + channel.readyState);
                //         channel.send('hello from master');
                //     };
                //     event.channel.onmessage = (event) => {
                //         console.log('onmessage-event-->' + JSON.stringify(event));
                //     };
                //     event.channel.onclose = (event) => {
                //         console.log('close-event-->' + JSON.stringify(event));
                //     };
                //     event.channel.onerror = (event) => {
                //         console.log('error-event-->' + JSON.stringify(event));
                //     };
                //     console.log('event.channel.onmessage = onRemoteDataMessage' + JSON.stringify(event));
                // };
            }

            // Poll for connection stats (one interval for the first connected client).
            if (!master.peerConnectionStatsInterval) {
                master.peerConnectionStatsInterval =
                // eslint-disable-next-line @typescript-eslint/no-misused-promises
                setInterval(() => peerConnection.getStats().then(
                    (report) => {
                        console.log(JSON.stringify(report));
                        console.log('iceConnectionState--->' + master.peerConnectionByClientId[remoteClientId].iceConnectionState);
                        // Guard: the data channel only exists when _openDataChannel is true.
                        const dataChannel = master.dataChannelByClientId[remoteClientId];
                        if (dataChannel) {
                            console.log('readyState-->' + dataChannel.readyState);
                            if (dataChannel.readyState === 'open') {
                                console.log('send----------------------------');
                                this.sendViewerMessage(dataChannel, 'hello,world');
                            }
                        }
                    }
                ), 1000);
            }
            // Send any ICE candidates to the other peer
            peerConnection.addEventListener('icecandidate', ({ candidate }) => {
                if (candidate) {
                    console.log('[MASTER] Generated ICE candidate for client: ' + remoteClientId);

                    // When trickle ICE is enabled, send the ICE candidates as they are generated.
                    if (this._useTrickleICE) {
                        console.log('[MASTER] Sending ICE candidate to client: ' + remoteClientId);
                        master.signalingClient!.sendIceCandidate(candidate, remoteClientId);
                    }
                }
                else {
                    console.log('[MASTER] All ICE candidates have been generated for client: ' + remoteClientId);

                    // When trickle ICE is disabled, send the answer now that all the ICE candidates have been generated.
                    if (!this._useTrickleICE) {
                        console.log('[MASTER] Sending SDP answer to client: ' + remoteClientId);
                        master.signalingClient!.sendSdpAnswer(peerConnection.localDescription!, remoteClientId);
                    }
                }
            });

            // As remote tracks are received, add them to the remote view
            peerConnection.addEventListener('track', event => {
                console.log('[MASTER] Received remote track from client: ' + remoteClientId);
                // if (remoteView.srcObject) {
                //     return;
                // }
                // remoteView.srcObject = event.streams[0];
                console.log(JSON.stringify(event.streams));
            });

            // If there's no video/audio, master.localStream will be null. So, we should skip adding the tracks from it.
            if (master.localStream) {
                // eslint-disable-next-line @reolink/general/disable-for-each-method
                master.localStream.getTracks()
                    .forEach((track: MediaStreamTrack) => peerConnection.addTrack(track, master.localStream));
            }

            await peerConnection.setRemoteDescription(offer);
            // Create an SDP answer to send back to the client
            console.log('[MASTER] Creating SDP answer for client: ' + remoteClientId);
            const descriptionInit = await peerConnection.createAnswer({
                offerToReceiveAudio: true,
                offerToReceiveVideo: true,
            });
            await peerConnection.setLocalDescription(
                descriptionInit
            );
            // When trickle ICE is enabled, send the answer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
            if (this._useTrickleICE) {
                console.log('[MASTER] Sending SDP answer to client: ' + remoteClientId);
                master.signalingClient!.sendSdpAnswer(peerConnection.localDescription!, remoteClientId);
            }
            console.log('[MASTER] Generating ICE candidates for client: ' + remoteClientId);
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        master.signalingClient.on('iceCandidate', async (candidate: any, remoteClientId: string) => {
            console.log('[MASTER] Received ICE candidate from client: ' + remoteClientId);

            // Add the ICE candidate received from the client to the peer connection.
            // NOTE(review): presumably an sdpOffer always precedes candidates for a
            // client; if not, this lookup would be undefined — verify signaling order.
            const peerConnection = master.peerConnectionByClientId[remoteClientId];
            await peerConnection.addIceCandidate(candidate);
        });

        master.signalingClient.on('close', () => {
            console.log('[MASTER] Disconnected from signaling channel');
        });

        master.signalingClient.on('error', () => {
            console.error('[MASTER] Signaling client error');
        });

        console.log('[MASTER] Starting master connection');
        master.signalingClient.open();

        // Keep the process alive indefinitely.
        await sleep(10000000);
    }

    /**
     * Sends a text message over the given data channel, logging (but not
     * rethrowing) any send failure — e.g. when the channel is not yet open.
     */
    public sendViewerMessage(dataChannel: WRTC.RTCDataChannel, message: string): void {
        if (dataChannel) {
            try {
                dataChannel.send(message);
            }
            catch (e) {
                // Fixed log tag: this class is the MASTER side, not the viewer.
                console.error('[MASTER] Send DataChannel: ', JSON.stringify(e));
            }
        }
    }
    // public async stop() {

    // }
}

// Resolves after `time` milliseconds; used to keep the process alive.
async function sleep(time: number): Promise<void> {
    await new Promise<void>((resolve) => setTimeout(resolve, time));
}

Viewer

import * as KVSWebRTC from 'amazon-kinesis-video-streams-webrtc';
import * as AWS from 'aws-sdk';
import * as WRTC from 'wrtc';

// State for a single viewer session: one peer connection, one signaling
// client, and (optionally) one data channel opened by the master.
export interface IViewer {
    peerConnection: WRTC.RTCPeerConnection | null;
    signalingClient: KVSWebRTC.SignalingClient | null;
    // Data channel received via the 'datachannel' event; null until the master opens one.
    dataChannel: WRTC.RTCDataChannel | null;
    // Handle returned by setInterval for the stats poller.
    peerConnectionStatsInterval: any;
}
/**
 * KVS WebRTC "viewer" side: connects to the signaling channel as VIEWER,
 * sends an SDP offer to the master, and logs any data channel the master
 * opens via the 'datachannel' event.
 *
 * NOTE(review): credentials, region and channel name are blank placeholders —
 * they must be filled in before this class can run.
 */
export class KvsView {
    // AWS credentials / channel configuration (intentionally blank placeholders).
    private readonly _accessKeyId = '';

    private readonly _secretAccessKey = '';

    private readonly _region = '';

    private readonly _channelName = '';

    private readonly _sessionToken = undefined;

    private readonly _endpoint = undefined;

    // true disables STUN and TURN entirely (host candidates only).
    private readonly _natTraversalDisabled = false;

    // true restricts ICE to TURN relay candidates (iceTransportPolicy: 'relay').
    private readonly _forceTURN = true;

    // true sends ICE candidates as they are gathered; false waits for gathering to finish.
    private readonly _useTrickleICE = true;

    // true requests 1280x720 video; false requests 640x480.
    private readonly _widescreen = true;

    private readonly _sendVideo = true;

    private readonly _sendAudio = false;

    // true would create a viewer-side data channel (currently the viewer only
    // listens for the master's channel via 'datachannel').
    private readonly _openDataChannel = true;

    private _clientId = '';

    /**
     * Generates a short random uppercase id used as the signaling clientId.
     * Not cryptographically secure — fine for a demo client id only.
     */
    public createClientId(): string {
        return Math.random().toString(36).substring(2).toUpperCase();
    }

    /**
     * Connects to the signaling channel as VIEWER, sends an offer, and keeps
     * the connection alive. The trailing sleep keeps the Node.js event loop
     * running so the peer connection stays open.
     */
    public async start(): Promise<void> {
        const viewer: IViewer = {
            peerConnection: null,
            signalingClient: null,
            dataChannel: null,
            peerConnectionStatsInterval: null
        };
        this._clientId = this.createClientId();
        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: this._endpoint,
            correctClockSkew: true,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient
            .describeSignalingChannel({
                ChannelName: this._channelName,
            }).promise();
        const channelARN = describeSignalingChannelResponse.ChannelInfo!.ChannelARN;
        console.log('[VIEWER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient
            .getSignalingChannelEndpoint({
                ChannelARN: channelARN!,
                SingleMasterChannelEndpointConfiguration: {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: KVSWebRTC.Role.VIEWER,
                }
            }).promise();

        // Index the returned endpoints by protocol ('WSS' for signaling, 'HTTPS' for ICE config).
        const endpointsByProtocol = getSignalingChannelEndpointResponse
            .ResourceEndpointList!.reduce((endpoints, endpoint) => {
            const index = endpoint.Protocol === undefined ? '' : endpoint.Protocol;
            (endpoints as any)[index] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        console.log('[VIEWER] Endpoints: ', endpointsByProtocol);

        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: (endpointsByProtocol as any)['HTTPS'],
            correctClockSkew: true,
        });

        // Get ICE server configuration
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
            .getIceServerConfig({
                ChannelARN: channelARN!,
            }).promise();
        const iceServers: any[] = [];
        // STUN only makes sense when not forced to relay-only.
        if (!this._natTraversalDisabled && !this._forceTURN) {
            iceServers.push({ urls: `stun:stun.kinesisvideo.${this._region}.amazonaws.com:443` });
        }
        if (!this._natTraversalDisabled) {
            // eslint-disable-next-line @reolink/general/disable-for-each-method
            getIceServerConfigResponse.IceServerList!.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        }
        console.log('[VIEWER] ICE servers: ', iceServers);

        // Create Signaling Client
        viewer.signalingClient = new KVSWebRTC.SignalingClient({
            channelARN: channelARN!,
            channelEndpoint: (endpointsByProtocol as any)['WSS'],
            clientId: this._clientId,
            role: KVSWebRTC.Role.VIEWER,
            region: this._region,
            credentials: {
                accessKeyId: this._accessKeyId,
                secretAccessKey: this._secretAccessKey,
                sessionToken: this._sessionToken,
            },
            systemClockOffset: kinesisVideoClient.config.systemClockOffset,
        });

        const resolution = this._widescreen
            ? { width: { ideal: 1280 }, height: { ideal: 720 } } : { width: { ideal: 640 }, height: { ideal: 480 } };
        const constraints = {
            video: this._sendVideo ? resolution : false,
            audio: this._sendAudio,
        };
        console.log(constraints);
        const configuration = {
            iceServers,
            iceTransportPolicy: this._forceTURN ? 'relay' : 'all',
        };

        viewer.peerConnection = new WRTC.RTCPeerConnection({
            iceTransportPolicy: configuration.iceTransportPolicy as WRTC.RTCIceTransportPolicy,
            iceServers: configuration.iceServers
        });

        if (this._openDataChannel) {
            // The viewer does not create its own channel here; it waits for the
            // channel created by the master and attaches handlers to it.
            // viewer.dataChannel = viewer.peerConnection.createDataChannel('hello-data');
            // viewer.dataChannel.onmessage = (event) => {
            //     console.log('onmessage-event-->' + JSON.stringify(event));
            // };
            viewer.peerConnection.ondatachannel = (event) => {
                // Inner handler parameters renamed so they no longer shadow `event`.
                event.channel.onmessage = (messageEvent) => {
                    console.log('onmessage-event-->' + JSON.stringify(messageEvent));
                };
                event.channel.onopen = (openEvent) => {
                    console.log('onopen-event-->' + JSON.stringify(openEvent));
                    // viewer.dataChannel!.send('hello,world');
                };
                event.channel.onclose = (closeEvent) => {
                    console.log('close-event-->' + JSON.stringify(closeEvent));
                };
                event.channel.onerror = (errorEvent) => {
                    console.log('error-event-->' + JSON.stringify(errorEvent));
                };
            };

        }

        // Poll for connection stats
        viewer.peerConnectionStatsInterval =
            // eslint-disable-next-line @typescript-eslint/no-misused-promises
            setInterval(() => viewer.peerConnection!.getStats().then((report) => {
                console.log(JSON.stringify(report));
            }), 1000);

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('open', async () => {
            console.log('[VIEWER] Connected to signaling service');

            // Get a stream from the webcam, add it to the peer connection, and display it in the local view.
            // If no video/audio needed, no need to request for the sources.
            // Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
            if (this._sendVideo || this._sendAudio) {
                // NOTE(review): the capture code is commented out, so this branch
                // currently does nothing; the unreachable try/catch around it was removed.
                // viewer.localStream = await navigator.mediaDevices.getUserMedia(constraints);
                // viewer.localStream.getTracks().forEach(track => viewer.peerConnection.addTrack(track, viewer.localStream));
                // localView.srcObject = viewer.localStream;
            }
            const descriptionInit = await viewer.peerConnection!.createOffer({
                offerToReceiveAudio: true,
                offerToReceiveVideo: true,
            });
            // Create an SDP offer to send to the master
            console.log('[VIEWER] Creating SDP offer');
            await viewer.peerConnection!.setLocalDescription(
                descriptionInit
            );

            // When trickle ICE is enabled, send the offer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
            if (this._useTrickleICE) {
                console.log('[VIEWER] Sending SDP offer');
                viewer.signalingClient!.sendSdpOffer(viewer.peerConnection!.localDescription!);
            }
            console.log('[VIEWER] Generating ICE candidates');
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('sdpAnswer', async answer => {
            // Add the SDP answer to the peer connection
            console.log('[VIEWER] Received SDP answer');
            await viewer.peerConnection!.setRemoteDescription(answer);
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('iceCandidate', async (candidate) => {
            // Add the ICE candidate received from the MASTER to the peer connection
            console.log('[VIEWER] Received ICE candidate');
            await viewer.peerConnection!.addIceCandidate(candidate);
        });

        viewer.signalingClient.on('close', () => {
            console.log('[VIEWER] Disconnected from signaling channel');
        });

        viewer.signalingClient.on('error', error => {
            console.error('[VIEWER] Signaling client error: ', error);
        });

        // Send any ICE candidates to the other peer
        viewer.peerConnection.addEventListener('icecandidate', ({ candidate }) => {
            if (candidate) {
                console.log('[VIEWER] Generated ICE candidate');

                // When trickle ICE is enabled, send the ICE candidates as they are generated.
                if (this._useTrickleICE) {
                    console.log('[VIEWER] Sending ICE candidate');
                    viewer.signalingClient!.sendIceCandidate(candidate);
                }
            }
            else {
                console.log('[VIEWER] All ICE candidates have been generated');
                // When trickle ICE is disabled, send the offer now that all the ICE candidates have been generated.
                if (!this._useTrickleICE) {
                    console.log('[VIEWER] Sending SDP offer');
                    viewer.signalingClient!.sendSdpOffer(viewer.peerConnection!.localDescription!);
                }
            }
        });

        // As remote tracks are received, add them to the remote view
        viewer.peerConnection.addEventListener('track', event => {
            console.log('[VIEWER] Received remote track');
            console.log('stream-->' + JSON.stringify(event));
            // if (remoteView.srcObject) {
            //     return;
            // }
            // viewer.remoteStream = event.streams[0];
            // remoteView.srcObject = viewer.remoteStream;
        });

        console.log('[VIEWER] Starting viewer connection');
        viewer.signalingClient.open();

        // Keep the process alive indefinitely.
        await sleep(100000000);
    }

    /**
     * Sends a text message over the given data channel, logging (but not
     * rethrowing) any send failure — e.g. when the channel is not yet open.
     */
    public sendViewerMessage(dataChannel: WRTC.RTCDataChannel, message: string): void {
        if (dataChannel) {
            try {
                dataChannel.send(message);
            }
            catch (e) {
                console.error('[VIEWER] Send DataChannel: ', JSON.stringify(e));
            }
        }
    }

}

// Resolves after `time` milliseconds; used to keep the process alive.
async function sleep(time: number): Promise<void> {
    await new Promise<void>((resolve) => setTimeout(resolve, time));
}

master logs

image

viewer logs

image

image

2--->createDataChannel from viewer succeeds

master

import * as KVSWebRTC from 'amazon-kinesis-video-streams-webrtc';
import * as AWS from 'aws-sdk';
import * as WRTC from 'wrtc';

// State shared by one master session: the signaling client plus per-viewer
// peer connections and data channels, keyed by the remote client id.
export interface IMaster {
    signalingClient: KVSWebRTC.SignalingClient | null;
    // Map of remoteClientId -> RTCPeerConnection (one per connected viewer).
    peerConnectionByClientId: any;
    // Map of remoteClientId -> RTCDataChannel (only populated when the data channel is enabled).
    dataChannelByClientId: any;
    // Local MediaStream from getUserMedia, or null when capture was not requested or failed.
    localStream: any;
    remoteStreams: any;
    // Handle returned by setInterval for the stats poller; null until the first offer arrives.
    peerConnectionStatsInterval: any;
}
/**
 * KVS WebRTC "master" role: connects to the Kinesis Video Streams signaling
 * channel, waits for viewer SDP offers, answers them, exchanges ICE
 * candidates, and (optionally) listens for a viewer-created data channel.
 *
 * All configuration is hard-coded in the private readonly fields below.
 */
export class KvsManager {

    // AWS credentials / region / channel configuration (filled in by the user).
    private readonly _accessKeyId = '';

    private readonly _secretAccessKey = '';

    private readonly _region = '';

    private readonly _channelName = '';

    private readonly _sessionToken = undefined;

    // When true, no STUN/TURN servers are used at all.
    private readonly _natTraversalDisabled = false;

    // When true, iceTransportPolicy is 'relay' (TURN only) and the STUN server is skipped.
    private readonly _forceTURN = true;

    // Trickle ICE: send answer/candidates as they are generated instead of waiting for gathering to finish.
    private readonly _useTrickleICE = true;

    private readonly _widescreen = true;

    private readonly _sendVideo = true;

    private readonly _sendAudio = false;

    // When true, attach an ondatachannel handler for viewer-initiated channels.
    private readonly _openDataChannel = true;

    /**
     * Brings the master online: resolves the signaling channel ARN and
     * endpoints, builds the ICE server list, registers signaling handlers
     * (sdpOffer / iceCandidate / close / error), then opens the signaling
     * connection and blocks for a long time via sleep().
     *
     * One RTCPeerConnection is created per remote client inside the
     * 'sdpOffer' handler. Note the required ordering there:
     * setRemoteDescription -> createAnswer -> setLocalDescription -> sendSdpAnswer.
     */
    public async start(): Promise<void> {

        const master: IMaster = {
            signalingClient: null,
            peerConnectionByClientId: {},
            dataChannelByClientId: {},
            localStream: null,
            remoteStreams: [],
            peerConnectionStatsInterval: null,
        };

        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: undefined,
            correctClockSkew: true,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient
            .describeSignalingChannel({
                ChannelName: this._channelName,
            }).promise();
        if (describeSignalingChannelResponse === undefined) {
            throw new Error('describeSignalingChannelResponse === undefined');
        }
        const channelARN = describeSignalingChannelResponse.ChannelInfo!.ChannelARN!;
        console.log('[MASTER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient
            .getSignalingChannelEndpoint({
                ChannelARN: channelARN,
                SingleMasterChannelEndpointConfiguration:
                {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: KVSWebRTC.Role.MASTER
                }
            }).promise();

        // Index the returned endpoints by protocol ('WSS' / 'HTTPS').
        const endpointsByProtocol = getSignalingChannelEndpointResponse
            .ResourceEndpointList!.reduce((endpoints, endpoint) => {
            const index = endpoint.Protocol === undefined ? '' : endpoint.Protocol;
            (endpoints as any)[index] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        console.log('[MASTER] Endpoints: ', endpointsByProtocol);
        // Create Signaling Client
        master.signalingClient = new KVSWebRTC.SignalingClient({
            channelARN,
            channelEndpoint: (endpointsByProtocol as any)['WSS'] ?? '',
            role: KVSWebRTC.Role.MASTER,
            region: this._region,
            credentials: {
                accessKeyId: this._accessKeyId,
                secretAccessKey: this._secretAccessKey,
                sessionToken: this._sessionToken,
            },
            systemClockOffset: kinesisVideoClient.config.systemClockOffset,
        });

        // Get ICE server configuration
        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: (endpointsByProtocol as any)['HTTPS'],
            correctClockSkew: true,
        });
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
            .getIceServerConfig({
                ChannelARN: channelARN,
            }).promise();
        const iceServers = [];

        // STUN only when NAT traversal is enabled and TURN is not forced.
        if (!this._natTraversalDisabled && !this._forceTURN) {
            iceServers.push({ urls: `stun:stun.kinesisvideo.${this._region}.amazonaws.com:443` });
        }
        if (!this._natTraversalDisabled) {
            // eslint-disable-next-line @reolink/general/disable-for-each-method
            getIceServerConfigResponse.IceServerList!.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        }
        console.log('[MASTER] ICE servers: ', iceServers);

        const configuration = {
            iceServers,
            iceTransportPolicy: this._forceTURN ? 'relay' : 'all',
        };

        const resolution = this._widescreen ?
            { width: { ideal: 1280 }, height: { ideal: 720 } } : { width: { ideal: 640 }, height: { ideal: 480 } };
        const constraints = {
            video: this._sendVideo ? resolution : false,
            audio: this._sendAudio,
        };

        // Get a stream from the webcam and display it in the local view.
        // If no video/audio needed, no need to request for the sources.
        // Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
        // NOTE(review): navigator.mediaDevices is a browser API — under Node this
        // will throw and only the catch below runs; confirm intended for Node use.
        if (this._sendVideo || this._sendAudio) {
            try {
                master.localStream = await navigator.mediaDevices.getUserMedia(constraints);
                // localView.srcObject = master.localStream;
            }
            catch (e) {
                console.error('[MASTER] Could not find webcam');
                console.log(JSON.stringify(e));
            }
        }

        // eslint-disable-next-line @typescript-eslint/no-misused-promises, @typescript-eslint/require-await
        master.signalingClient.on('open', () => {
            console.log('[MASTER] Connected to signaling service');
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        master.signalingClient.on('sdpOffer', async (offer: WRTC.RTCSessionDescriptionInit, remoteClientId: string) => {
            console.log('[MASTER] Received SDP offer from client: ' + remoteClientId);
            // Create a new peer connection using the offer from the given client
            const peerConnection = new WRTC.RTCPeerConnection({
                iceTransportPolicy: configuration.iceTransportPolicy as WRTC.RTCIceTransportPolicy,
                iceServers: configuration.iceServers
            });
            master.peerConnectionByClientId[remoteClientId] = peerConnection;
            if (this._openDataChannel) {
                // NOTE(review): master-side createDataChannel is commented out; this
                // variant only accepts a channel the viewer creates (ondatachannel).
                // master.dataChannelByClientId[remoteClientId] = peerConnection.createDataChannel('hello-data');
                // (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onerror = (e) => {
                //     console.log('error--->' + JSON.stringify(e));
                // };
                // (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onopen = (e) => {
                //     console.log('open--->' + JSON.stringify(e));
                // };
                // (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onclose = (e) => {
                //     console.log('close--->' + JSON.stringify(e));
                // };
                // (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onclosing = (e) => {
                //     console.log('closing--->' + JSON.stringify(e));
                // };
                // (master.dataChannelByClientId[remoteClientId] as WRTC.RTCDataChannel).onmessage = (e) => {
                //     console.log('message--->' + JSON.stringify(e));
                // };
                peerConnection.ondatachannel = (event) => {
                    const channel = event.channel;
                    // Send a test payload as soon as the remote-created channel opens.
                    event.channel.onopen = (event) => {
                        console.log('onopen-event-->' + JSON.stringify(event));
                        console.log('readyState-->' + channel.readyState);
                        channel.send('fuck,cao');
                    };
                    event.channel.onmessage = (event) => {
                        console.log('onmessage-event-->' + JSON.stringify(event));
                    };
                    event.channel.onclose = (event) => {
                        console.log('close-event-->' + JSON.stringify(event));
                    };
                    event.channel.onerror = (event) => {
                        console.log('error-event-->' + JSON.stringify(event));
                    };
                    console.log('event.channel.onmessage = onRemoteDataMessage' + JSON.stringify(event));
                };
            }

            // Poll for connection stats
            // NOTE(review): guarded so only the FIRST client starts the poller; the
            // interval is never cleared in this class.
            if (!master.peerConnectionStatsInterval) {
                master.peerConnectionStatsInterval =
                // eslint-disable-next-line @typescript-eslint/no-misused-promises
                setInterval(() => peerConnection.getStats().then(
                    (report) => {
                        console.log(JSON.stringify(report));
                        // console.log('iceConnectionState--->' + master.peerConnectionByClientId[remoteClientId].iceConnectionState);
                        // console.log('readyState-->' + master.dataChannelByClientId[remoteClientId].readyState);
                        // if (master.dataChannelByClientId[remoteClientId].readyState === 'open') {
                        //     console.log('send----------------------------');
                        //     this.sendViewerMessage(master.dataChannelByClientId[remoteClientId], 'hello,world');
                        // }
                    }
                ), 1000);
                // setInterval(() => peerConnection.getStats().then(), 1000);
            }
            // Send any ICE candidates to the other peer
            peerConnection.addEventListener('icecandidate', ({ candidate }) => {
                if (candidate) {
                    console.log('[MASTER] Generated ICE candidate for client: ' + remoteClientId);

                    // When trickle ICE is enabled, send the ICE candidates as they are generated.
                    if (this._useTrickleICE) {
                        console.log('[MASTER] Sending ICE candidate to client: ' + remoteClientId);
                        master.signalingClient!.sendIceCandidate(candidate, remoteClientId);
                    }
                }
                else {
                    console.log('[MASTER] All ICE candidates have been generated for client: ' + remoteClientId);

                    // When trickle ICE is disabled, send the answer now that all the ICE candidates have been generated.
                    if (!this._useTrickleICE) {
                        console.log('[MASTER] Sending SDP answer to client: ' + remoteClientId);
                        master.signalingClient!.sendSdpAnswer(peerConnection.localDescription!, remoteClientId);
                    }
                }
            });

            // As remote tracks are received, add them to the remote view
            peerConnection.addEventListener('track', event => {
                console.log('[MASTER] Received remote track from client: ' + remoteClientId);
                // if (remoteView.srcObject) {
                //     return;
                // }
                // remoteView.srcObject = event.streams[0];
                console.log(JSON.stringify(event.streams));
            });

            // If there's no video/audio, master.localStream will be null. So, we should skip adding the tracks from it.
            if (master.localStream) {
                // eslint-disable-next-line @reolink/general/disable-for-each-method
                master.localStream.
                    getTracks()
                    .forEach((track: MediaStreamTrack) => peerConnection.addTrack(track, master.localStream));
            }

            // Apply the viewer's offer, then produce and apply our answer.
            await peerConnection.setRemoteDescription(offer);
            // Create an SDP answer to send back to the client
            console.log('[MASTER] Creating SDP answer for client: ' + remoteClientId);
            const descriptionInit = await peerConnection.createAnswer({
                offerToReceiveAudio: true,
                offerToReceiveVideo: true,
            });
            // descriptionInit.toJSON = () => {
            //     return JSON.stringify(this);
            // };
            await peerConnection.setLocalDescription(
                descriptionInit
            );
            // When trickle ICE is enabled, send the answer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
            if (this._useTrickleICE) {
                console.log('[MASTER] Sending SDP answer to client: ' + remoteClientId);
                // master.signalingClient!.sendSdpAnswer(peerConnection.localDescription!, remoteClientId);
                master.signalingClient!.sendSdpAnswer(peerConnection.localDescription!, remoteClientId);
            }
            console.log('[MASTER] Generating ICE candidates for client: ' + remoteClientId);
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises, @typescript-eslint/require-await
        master.signalingClient.on('iceCandidate', async (candidate: any, remoteClientId: string) => {
            console.log('[MASTER] Received ICE candidate from client: ' + remoteClientId);

            // Add the ICE candidate received from the client to the peer connection
            // NOTE(review): assumes the 'sdpOffer' handler already stored a peer
            // connection for this client; candidates arriving first would throw.
            const peerConnection = master.peerConnectionByClientId[remoteClientId];
            await peerConnection.addIceCandidate(candidate);
        });

        master.signalingClient.on('close', () => {
            console.log('[MASTER] Disconnected from signaling channel');
        });

        master.signalingClient.on('error', () => {
            console.error('[MASTER] Signaling client error');
        });

        console.log('[MASTER] Starting master connection');
        master.signalingClient.open();

        // Keep the process alive; there is no stop() implemented yet.
        await sleep(10000000);
    }

    /**
     * Best-effort send over a data channel; errors from send() are logged
     * and swallowed.
     * NOTE(review): log tag says [VIEWER] but this is the master class —
     * looks like a copy/paste of the viewer helper.
     */
    public sendViewerMessage(dataChannel: WRTC.RTCDataChannel, message: string): void {
        if (dataChannel) {
            try {
                dataChannel.send(message);
            }
            catch (e) {
                console.error('[VIEWER] Send DataChannel: ', JSON.stringify(e));
            }
        }
    }
    // public async stop() {

    // }
}

/**
 * Resolves after the given delay.
 * @param time delay in milliseconds
 */
async function sleep(time: number): Promise<void> {
    return new Promise<void>((resolve) => setTimeout(resolve, time));
}

Viewer

import * as KVSWebRTC from 'amazon-kinesis-video-streams-webrtc';
import * as AWS from 'aws-sdk';
import * as WRTC from 'wrtc';

/**
 * Mutable state bag for a single viewer session.
 */
export interface IViewer {
    // Peer connection to the master; null until start() creates it.
    peerConnection: WRTC.RTCPeerConnection | null;
    // KVS signaling client; null until start() constructs it.
    signalingClient: KVSWebRTC.SignalingClient | null;
    // Viewer-created data channel; null when _openDataChannel is false.
    dataChannel: WRTC.RTCDataChannel | null;
    // Handle returned by setInterval for the getStats() poller.
    peerConnectionStatsInterval: any;
}
/**
 * KVS WebRTC "viewer" role: connects to the signaling channel with a random
 * client id, creates the peer connection and (optionally) a data channel,
 * sends an SDP offer to the master, and applies the master's answer and ICE
 * candidates. Configuration is hard-coded in the private readonly fields.
 */
export class KvsView {
    // AWS credentials / region / channel configuration (filled in by the user).
    private readonly _accessKeyId = '';

    private readonly _secretAccessKey = '';

    private readonly _region = '';

    private readonly _channelName = '';

    private readonly _sessionToken = undefined;

    private readonly _endpoint = undefined;

    // When true, no STUN/TURN servers are used at all.
    private readonly _natTraversalDisabled = false;

    // When true, iceTransportPolicy is 'relay' (TURN only) and STUN is skipped.
    private readonly _forceTURN = true;

    // Trickle ICE: send offer/candidates as they are generated.
    private readonly _useTrickleICE = true;

    private readonly _widescreen = true;

    private readonly _sendVideo = true;

    private readonly _sendAudio = false;

    // When true, the viewer creates the 'hello-data' channel before offering.
    private readonly _openDataChannel = true;

    private _clientId = '';

    // Random uppercase alphanumeric id used as the signaling clientId.
    public createClientId(): string {
        return Math.random().toString(36).substring(2).toUpperCase();
    }

    /**
     * Brings the viewer online: resolves channel ARN/endpoints and ICE
     * servers, creates the RTCPeerConnection (and data channel), registers
     * signaling handlers, then opens signaling. On 'open' it creates and
     * sends the SDP offer; 'sdpAnswer'/'iceCandidate' complete the handshake.
     */
    public async start(): Promise<void> {
        const viewer: IViewer = {
            peerConnection: null,
            signalingClient: null,
            dataChannel: null,
            peerConnectionStatsInterval: null
        };
        this._clientId = this.createClientId();
        // Create KVS client
        const kinesisVideoClient = new AWS.KinesisVideo({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: this._endpoint,
            correctClockSkew: true,
        });

        // Get signaling channel ARN
        const describeSignalingChannelResponse = await kinesisVideoClient
            .describeSignalingChannel({
                ChannelName: this._channelName,
            }).promise();
        const channelARN = describeSignalingChannelResponse.ChannelInfo!.ChannelARN;
        console.log('[VIEWER] Channel ARN: ', channelARN);

        // Get signaling channel endpoints
        const getSignalingChannelEndpointResponse = await kinesisVideoClient
            .getSignalingChannelEndpoint({
                ChannelARN: channelARN!,
                SingleMasterChannelEndpointConfiguration: {
                    Protocols: ['WSS', 'HTTPS'],
                    Role: KVSWebRTC.Role.VIEWER,
                }
            }).promise();

        // Index the returned endpoints by protocol ('WSS' / 'HTTPS').
        const endpointsByProtocol = getSignalingChannelEndpointResponse
            .ResourceEndpointList!.reduce((endpoints, endpoint) => {
            const index = endpoint.Protocol === undefined ? '' : endpoint.Protocol;
            (endpoints as any)[index] = endpoint.ResourceEndpoint;
            return endpoints;
        }, {});
        // const endpointsByProtocol = getSignalingChannelEndpointResponse.ResourceEndpointList.reduce((endpoints, endpoint) => {
        //     endpoints[endpoint.Protocol] = endpoint.ResourceEndpoint;
        //     return endpoints;
        // }, {});
        console.log('[VIEWER] Endpoints: ', endpointsByProtocol);

        const kinesisVideoSignalingChannelsClient = new AWS.KinesisVideoSignalingChannels({
            region: this._region,
            accessKeyId: this._accessKeyId,
            secretAccessKey: this._secretAccessKey,
            sessionToken: this._sessionToken,
            endpoint: (endpointsByProtocol as any)['HTTPS'],
            correctClockSkew: true,
        });

        // Get ICE server configuration
        const getIceServerConfigResponse = await kinesisVideoSignalingChannelsClient
            .getIceServerConfig({
                ChannelARN: channelARN!,
            }).promise();
        const iceServers: any[] = [];
        // STUN only when NAT traversal is enabled and TURN is not forced.
        if (!this._natTraversalDisabled && !this._forceTURN) {
            iceServers.push({ urls: `stun:stun.kinesisvideo.${this._region}.amazonaws.com:443` });
        }
        if (!this._natTraversalDisabled) {
            // eslint-disable-next-line @reolink/general/disable-for-each-method
            getIceServerConfigResponse.IceServerList!.forEach(iceServer =>
                iceServers.push({
                    urls: iceServer.Uris,
                    username: iceServer.Username,
                    credential: iceServer.Password,
                }),
            );
        }
        console.log('[VIEWER] ICE servers: ', iceServers);

        // Create Signaling Client
        viewer.signalingClient = new KVSWebRTC.SignalingClient({
            channelARN: channelARN!,
            channelEndpoint: (endpointsByProtocol as any)['WSS'],
            clientId: this._clientId,
            role: KVSWebRTC.Role.VIEWER,
            region: this._region,
            credentials: {
                accessKeyId: this._accessKeyId,
                secretAccessKey: this._secretAccessKey,
                sessionToken: this._sessionToken,
            },
            systemClockOffset: kinesisVideoClient.config.systemClockOffset,
        });

        const resolution = this._widescreen
            ? { width: { ideal: 1280 }, height: { ideal: 720 } } : { width: { ideal: 640 }, height: { ideal: 480 } };
        const constraints = {
            video: this._sendVideo ? resolution : false,
            audio: this._sendAudio,
        };
        console.log(constraints);
        const configuration = {
            iceServers,
            iceTransportPolicy: this._forceTURN ? 'relay' : 'all',
        };

        viewer.peerConnection = new WRTC.RTCPeerConnection({
            iceTransportPolicy: configuration.iceTransportPolicy as WRTC.RTCIceTransportPolicy,
            iceServers: configuration.iceServers
        });

        // Create the data channel BEFORE the offer so it is negotiated in the SDP.
        if (this._openDataChannel) {
            viewer.dataChannel = viewer.peerConnection.createDataChannel('hello-data');
            viewer.dataChannel.onmessage = (event) => {
                console.log('onmessage-event-->' + JSON.stringify(event));
            };
            // viewer.peerConnection.ondatachannel = (event) => {
            //     event.channel.onmessage = (event) => {
            //         console.log('onmessage-event-->' + JSON.stringify(event));
            //     };
            //     event.channel.onopen = (event) => {
            //         console.log('onopen-event-->' + JSON.stringify(event));
            //         // viewer.dataChannel!.send('hello,world');
            //     };
            //     event.channel.onclose = (event) => {
            //         console.log('close-event-->' + JSON.stringify(event));
            //     };
            //     event.channel.onerror = (event) => {
            //         console.log('error-event-->' + JSON.stringify(event));
            //     };
            // };

        }

        // Poll for connection stats
        // NOTE(review): the callback dereferences viewer.dataChannel! — this
        // throws every second if _openDataChannel is false; the interval is
        // never cleared in this class.
        viewer.peerConnectionStatsInterval =
            // eslint-disable-next-line @typescript-eslint/no-misused-promises
            setInterval(() => viewer.peerConnection!.getStats().then((report) => {

                console.log('iceConnectionState--->' + viewer.peerConnection!.iceConnectionState);
                console.log('readyState-->' + viewer.dataChannel!.readyState);
                if (viewer.dataChannel!.readyState === 'open') {
                    console.log('send----------------------------');
                    this.sendViewerMessage(viewer.dataChannel!, 'hello,world');
                }
                console.log(JSON.stringify(report));
            }), 1000);

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('open', async () => {
            console.log('[VIEWER] Connected to signaling service');

            // Get a stream from the webcam, add it to the peer connection, and display it in the local view.
            // If no video/audio needed, no need to request for the sources.
            // Otherwise, the browser will throw an error saying that either video or audio has to be enabled.
            // NOTE(review): the try body below is fully commented out, so the
            // catch is dead code in this variant.
            if (this._sendVideo || this._sendAudio) {
                try {
                    // viewer.localStream = await navigator.mediaDevices.getUserMedia(constraints);
                    // viewer.localStream.getTracks().forEach(track => viewer.peerConnection.addTrack(track, viewer.localStream));
                    // localView.srcObject = viewer.localStream;
                }
                catch (e) {
                    console.error('[VIEWER] Could not find webcam' + JSON.stringify(e));
                    return;
                }
            }
            const descriptionInit = await viewer.peerConnection!.createOffer({
                offerToReceiveAudio: true,
                offerToReceiveVideo: true,
            });
            // Create an SDP offer to send to the master
            console.log('[VIEWER] Creating SDP offer');
            await viewer.peerConnection!.setLocalDescription(
                descriptionInit
            );

            // When trickle ICE is enabled, send the offer now and then send ICE candidates as they are generated. Otherwise wait on the ICE candidates.
            if (this._useTrickleICE) {
                console.log('[VIEWER] Sending SDP offer');
                viewer.signalingClient!.sendSdpOffer(viewer.peerConnection!.localDescription!);
            }
            console.log('[VIEWER] Generating ICE candidates');
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('sdpAnswer', async answer => {
            // Add the SDP answer to the peer connection
            console.log('[VIEWER] Received SDP answer');
            await viewer.peerConnection!.setRemoteDescription(answer);
        });

        // eslint-disable-next-line @typescript-eslint/no-misused-promises
        viewer.signalingClient.on('iceCandidate', async (candidate) => {
            // Add the ICE candidate received from the MASTER to the peer connection
            console.log('[VIEWER] Received ICE candidate');
            await viewer.peerConnection!.addIceCandidate(candidate);
        });

        viewer.signalingClient.on('close', () => {
            console.log('[VIEWER] Disconnected from signaling channel');
        });

        viewer.signalingClient.on('error', error => {
            console.error('[VIEWER] Signaling client error: ', error);
        });

        // Send any ICE candidates to the other peer
        viewer.peerConnection.addEventListener('icecandidate', ({ candidate }) => {
            if (candidate) {
                console.log('[VIEWER] Generated ICE candidate');

                // When trickle ICE is enabled, send the ICE candidates as they are generated.
                if (this._useTrickleICE) {
                    console.log('[VIEWER] Sending ICE candidate');
                    viewer.signalingClient!.sendIceCandidate(candidate);
                }
            }
            else {
                console.log('[VIEWER] All ICE candidates have been generated');
                // When trickle ICE is disabled, send the offer now that all the ICE candidates have been generated.
                if (!this._useTrickleICE) {
                    console.log('[VIEWER] Sending SDP offer');
                    viewer.signalingClient!.sendSdpOffer(viewer.peerConnection!.localDescription!);
                }
            }
        });

        // As remote tracks are received, add them to the remote view
        viewer.peerConnection.addEventListener('track', event => {
            console.log('[VIEWER] Received remote track');
            console.log('stream-->' + JSON.stringify(event));
            // if (remoteView.srcObject) {
            //     return;
            // }
            // viewer.remoteStream = event.streams[0];
            // remoteView.srcObject = viewer.remoteStream;
        });

        console.log('[VIEWER] Starting viewer connection');
        viewer.signalingClient.open();

        // Keep the process alive; there is no stop() implemented.
        await sleep(100000000);
    }

    /**
     * Best-effort send over a data channel; errors from send() are logged
     * and swallowed.
     */
    public sendViewerMessage(dataChannel: WRTC.RTCDataChannel, message: string): void {
        if (dataChannel) {
            try {
                dataChannel.send(message);
            }
            catch (e) {
                console.error('[VIEWER] Send DataChannel: ', JSON.stringify(e));
            }
        }
    }

}

/**
 * Resolves after the given delay.
 * @param time delay in milliseconds
 */
async function sleep(time: number): Promise<void> {
    return new Promise<void>((resolve) => setTimeout(resolve, time));
}

master logs

image

viewer logs

image

image

niyatim23 commented 1 year ago

Hi @helloworld098765, do you see the same issue when you try to use the sample application here as is?

helloworld098765 commented 1 year ago

Hi @helloworld098765, do you see the same issue when you try to use the sample application here as is?

I have tested it using the repository demos.

  1. When I call createDataChannel from the Master, the DataChannel is not available, and I can check the status of the DataChannel. But when I create a DataChannel in both the Master and the Viewer at the same time, both DataChannels are available and can send and receive data normally.
disa6302 commented 1 year ago

@helloworld098765 ,

We fixed some issues related to data channel in this SDK and the fix is here: https://github.com/awslabs/amazon-kinesis-video-streams-webrtc-sdk-js/pull/213

Please reach out to us if you are facing issues after taking in this change.