// Copyright Epic Games, Inc. All Rights Reserved.

// Universal Module Definition - see https://www.davidbcalhoun.com/2014/what-is-amd-commonjs-and-umd/
(function (root, factory) {
    if (typeof define === 'function' && define.amd) {
        // AMD. Register as an anonymous module.
        define(["./adapter"], factory);
    } else if (typeof exports === 'object') {
        // Node. Does not work with strict CommonJS, but only CommonJS-like
        // environments that support module.exports, like Node.
        module.exports = factory(require("./adapter"));
    } else {
        // Browser globals (root is window).
        root.webRtcPlayer = factory(root.adapter);
    }
}(this, function (adapter) {
function webRtcPlayer(parOptions) {
    parOptions = typeof parOptions !== 'undefined' ? parOptions : {};

    var self = this;

    //**********************
    // Config setup
    //**********************
    this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
    this.cfg.sdpSemantics = 'unified-plan';
    // this.cfg.rtcAudioJitterBufferMaxPackets = 10;
    // this.cfg.rtcAudioJitterBufferFastAccelerate = true;
    // this.cfg.rtcAudioJitterBufferMinDelayMs = 0;
    // If this is true in Chrome 89+, SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
    // 4.27 Pixel Streaming does not need this set to false, as it supports `offerExtmapAllowMixed`.
    // tl;dr: keep this line (set to false) when older versions of Pixel Streaming must work with Chrome 89+.
    this.cfg.offerExtmapAllowMixed = false;
    //**********************
    // Variables
    //**********************
    this.pcClient = null;
    this.dcClient = null;
    this.tnClient = null;
    this.sdpConstraints = {
        offerToReceiveAudio: 1, // Note: if you don't need audio you can get improved latency by turning this off.
        offerToReceiveVideo: 1,
        voiceActivityDetection: false
    };
    // See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (needed for Firefox to be consistent with Chrome).
    this.dataChannelOptions = { ordered: true };
    // Useful when the video/audio needs to autoplay (without user input), as browsers do not
    // allow unmuted autoplay of sound sources without prior user interaction.
    this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
    this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
    // To enable the mic in the browser, use SSL/localhost and pass ?useMic in the query string.
    const urlParams = new URLSearchParams(window.location.search);
    this.useMic = urlParams.has('useMic');
    if (!this.useMic) {
        console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
    }
    // When ?useMic is passed, check for SSL or localhost.
    let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
    let isHttpsConnection = location.protocol === 'https:';
    if (this.useMic && !isLocalhostConnection && !isHttpsConnection) {
        this.useMic = false;
        console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
        console.error("For testing, you can enable HTTP microphone access in Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'.");
    }
    // Latency tester
    this.latencyTestTimings = {
        TestStartTimeMs: null,
        UEReceiptTimeMs: null,
        UEPreCaptureTimeMs: null,
        UEPostCaptureTimeMs: null,
        UEPreEncodeTimeMs: null,
        UEPostEncodeTimeMs: null,
        UETransmissionTimeMs: null,
        BrowserReceiptTimeMs: null,
        FrameDisplayDeltaTimeMs: null,
        Reset: function() {
            this.TestStartTimeMs = null;
            this.UEReceiptTimeMs = null;
            this.UEPreCaptureTimeMs = null;
            this.UEPostCaptureTimeMs = null;
            this.UEPreEncodeTimeMs = null;
            this.UEPostEncodeTimeMs = null;
            this.UETransmissionTimeMs = null;
            this.BrowserReceiptTimeMs = null;
            this.FrameDisplayDeltaTimeMs = null;
        },
        SetUETimings: function(UETimings) {
            this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
            this.UEPreCaptureTimeMs = UETimings.PreCaptureTimeMs;
            this.UEPostCaptureTimeMs = UETimings.PostCaptureTimeMs;
            this.UEPreEncodeTimeMs = UETimings.PreEncodeTimeMs;
            this.UEPostEncodeTimeMs = UETimings.PostEncodeTimeMs;
            this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
            this.BrowserReceiptTimeMs = Date.now();
            this.OnAllLatencyTimingsReady(this);
        },
        SetFrameDisplayDeltaTime: function(DeltaTimeMs) {
            if (this.FrameDisplayDeltaTimeMs == null) {
                this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
                this.OnAllLatencyTimingsReady(this);
            }
        },
        OnAllLatencyTimingsReady: function(Timings) {}
    };
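    // Flow, as implemented above: startLatencyTest() stamps TestStartTimeMs; the UE side's
    // reply is applied via SetUETimings(); the stats poller supplies the frame display delta
    // via SetFrameDisplayDeltaTime(). OnAllLatencyTimingsReady fires from both setters, so a
    // consumer should check which fields are populated before reading a full set of timings.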
    //**********************
    // Functions
    //**********************

    // Create the video element and expose it as the `video` property.
    this.createWebRtcVideo = function() {
        var video = document.createElement('video');

        video.id = "streamingVideo";
        video.playsInline = true;
        video.disablePictureInPicture = true;
        // video.muted = self.startVideoMuted;
        video.muted = true;

        video.addEventListener('loadedmetadata', function(e) {
            if (self.onVideoInitialised) {
                self.onVideoInitialised();
            }
        }, true);
        // Check if requestVideoFrameCallback is supported.
        if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
            // The API is supported!
            const onVideoFrameReady = (now, metadata) => {
                // Time from the frame being received to it being presented for compositing.
                if (self.aggregatedStats && metadata.receiveTime && metadata.presentationTime) {
                    const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
                    self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
                }

                // Re-register the callback to be notified about the next frame.
                video.requestVideoFrameCallback(onVideoFrameReady);
            };

            // Initially register the callback to be notified about the first frame.
            video.requestVideoFrameCallback(onVideoFrameReady);
        }

        return video;
    }
    this.video = this.createWebRtcVideo();
    let onsignalingstatechange = function(state) {
        console.info('signaling state change:', state);
    };
    let oniceconnectionstatechange = function(state) {
        console.info('ice connection state change:', state);
    };
    let onicegatheringstatechange = function(state) {
        console.info('ice gathering state change:', state);
    };
    let handleOnTrack = function(e) {
        console.log('handleOnTrack', e.streams);

        if (e.track) {
            console.log('Got track - ' + e.track.kind + ' id=' + e.track.id + ' readyState=' + e.track.readyState);
        }

        if (e.track.kind == "audio") {
            handleOnAudioTrack(e.streams[0]);
            return;
        }
        else if (e.track.kind == "video" && self.video.srcObject !== e.streams[0]) {
            self.video.srcObject = e.streams[0];
            console.log('Set video source from video track ontrack.');
            return;
        }
    };
    let handleOnAudioTrack = function(audioMediaStream) {
        // Do nothing; the video element has the same media stream as this audio track (they are linked).
        if (self.video.srcObject == audioMediaStream) {
            return;
        }
        // The video element has some other media stream that is not associated with this audio track.
        else if (self.video.srcObject && self.video.srcObject !== audioMediaStream) {
            // Create a new audio element to play this media stream.
            let audioElem = document.createElement("audio");
            audioElem.srcObject = audioMediaStream;
            // There is no way to autoplay audio (even muted), so we defer audio until the first click.
            if (!self.autoPlayAudio) {
                let clickToPlayAudio = function() {
                    audioElem.play();
                    self.video.removeEventListener("click", clickToPlayAudio);
                };
                self.video.addEventListener("click", clickToPlayAudio);
            }
            // Otherwise we assume the user has already clicked somewhere on the page and autoplaying audio will work.
            else {
                audioElem.play();
            }
            console.log('Created new audio element to play separate audio stream.');
        }
    };
    let setupDataChannel = function(pc, label, options) {
        try {
            let datachannel = pc.createDataChannel(label, options);
            console.log(`Created datachannel (${label})`);
            // Inform the browser we would like binary data as an ArrayBuffer (Firefox chooses Blob by default!).
            datachannel.binaryType = "arraybuffer";

            datachannel.onopen = function (e) {
                console.log(`data channel (${label}) connected`);
                if (self.onDataChannelConnected) {
                    self.onDataChannelConnected();
                }
            };
            datachannel.onclose = function (e) {
                console.log(`data channel (${label}) closed`);
            };
            datachannel.onmessage = function (e) {
                //console.log(`Got message (${label})`, e.data);
                if (self.onDataChannelMessage) {
                    self.onDataChannelMessage(e.data);
                }
            };
            return datachannel;
        } catch (e) {
            console.warn('No data channel', e);
            return null;
        }
    };
    let onicecandidate = function (e) {
        console.log('ICE candidate', e);
        if (e.candidate && e.candidate.candidate) {
            self.onWebRtcCandidate(e.candidate);
        }
    };
    let handleCreateOffer = function (pc) {
        pc.createOffer(self.sdpConstraints).then(function (offer) {
            // Munging is where we modify the SDP string to set parameters that are not exposed through the browser's WebRTC API.
            mungeSDPOffer(offer);
            // Set our munged SDP on the local peer connection so it is "set" and will be sent across.
            pc.setLocalDescription(offer);
            if (self.onWebRtcOffer) {
                self.onWebRtcOffer(offer);
            }
        },
        function () { console.warn("Couldn't create offer"); });
    };
    let mungeSDPOffer = function (offer) {
        // Turn off the video-timing SDP extension sent from the browser.
        //offer.sdp = offer.sdp.replace("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", "");
        // This indicates we support stereo (Chrome needs this).
        offer.sdp = offer.sdp.replace('useinbandfec=1', 'useinbandfec=1;stereo=1;sprop-maxcapturerate=48000');
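        // For illustration (not from the original source): an Opus fmtp line such as
        //   a=fmtp:111 minptime=10;useinbandfec=1
        // becomes
        //   a=fmtp:111 minptime=10;useinbandfec=1;stereo=1;sprop-maxcapturerate=48000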
    };

    let setupPeerConnection = function (pc) {
        if (pc.SetBitrate)
            console.log("Hurray! There's an RTCPeerConnection.SetBitrate function.");

        // Set up peerConnection events.
        pc.onsignalingstatechange = onsignalingstatechange;
        pc.oniceconnectionstatechange = oniceconnectionstatechange;
        pc.onicegatheringstatechange = onicegatheringstatechange;
        pc.ontrack = handleOnTrack;
        pc.onicecandidate = onicecandidate;
    };
    let generateAggregatedStatsFunction = function() {
        if (!self.aggregatedStats)
            self.aggregatedStats = {};

        return function(stats) {
            //console.log('Printing Stats');
            let newStat = {};

            stats.forEach(stat => {
                // console.log(JSON.stringify(stat, undefined, 4));
                if (stat.type == 'inbound-rtp'
                    && !stat.isRemote
                    && (stat.mediaType == 'video' || stat.id.toLowerCase().includes('video'))) {

                    newStat.timestamp = stat.timestamp;
                    newStat.bytesReceived = stat.bytesReceived;
                    newStat.framesDecoded = stat.framesDecoded;
                    newStat.packetsLost = stat.packetsLost;
                    newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
                    newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
                    newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;

                    if (self.aggregatedStats && self.aggregatedStats.timestamp) {
                        if (self.aggregatedStats.bytesReceived) {
                            // Bitrate = bits received since last poll / ms since last poll. Because the
                            // numerator is in bits and the denominator in ms, the '* 1000' (ms to s) and
                            // '/ 1000' (bits to kbits) cancel out, so this is already in kbit/s.
                            newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
                            newStat.bitrate = Math.floor(newStat.bitrate);
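                            // e.g. (illustrative numbers) 250,000 bytes received over 1,000 ms:
                            // 8 * 250000 / 1000 = 2000 kbit/s.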
                            newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate;
                            newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate;
                        }

                        if (self.aggregatedStats.bytesReceivedStart) {
                            newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
                            newStat.avgBitrate = Math.floor(newStat.avgBitrate);
                        }

                        if (self.aggregatedStats.framesDecoded) {
                            // Framerate = frames decoded since last poll / seconds since last poll.
                            newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
                            newStat.framerate = Math.floor(newStat.framerate);
                            newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate;
                            newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate;
                        }

                        if (self.aggregatedStats.framesDecodedStart) {
                            newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
                            newStat.avgframerate = Math.floor(newStat.avgframerate);
                        }
                    }
                }

                // Read video track stats.
                if (stat.type == 'track' && (stat.trackIdentifier == 'video_label' || stat.kind == 'video')) {
                    newStat.framesDropped = stat.framesDropped;
                    newStat.framesReceived = stat.framesReceived;
                    newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
                    newStat.frameHeight = stat.frameHeight;
                    newStat.frameWidth = stat.frameWidth;
                    newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
                    newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
                }

                if (stat.type == 'candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0) {
                    newStat.currentRoundTripTime = stat.currentRoundTripTime;
                }
            });

            if (self.aggregatedStats.receiveToCompositeMs) {
                newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
                self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
            }

            self.aggregatedStats = newStat;

            if (self.onAggregatedStats)
                self.onAggregatedStats(newStat);
        };
    };
    let setupTracksToSendAsync = async function(pc) {
        // Set up a transceiver for receiving UE video.
        pc.addTransceiver("video", { direction: "recvonly" });

        // Set up a transceiver for sending mic audio to UE and receiving audio from UE.
        if (!self.useMic) {
            pc.addTransceiver("audio", { direction: "recvonly" });
        }
        else {
            let audioSendOptions = {
                autoGainControl: false,
                channelCount: 1,
                echoCancellation: false,
                latency: 0,
                noiseSuppression: false,
                sampleRate: 16000,
                volume: 1.0
            };
            // Note: using the mic on Android Chrome requires SSL or the chrome://flags/ setting
            // "unsafely-treat-insecure-origin-as-secure".
            const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
            if (stream) {
                for (const track of stream.getTracks()) {
                    if (track.kind && track.kind == "audio") {
                        pc.addTransceiver(track, { direction: "sendrecv" });
                    }
                }
            }
            else {
                pc.addTransceiver("audio", { direction: "recvonly" });
            }
        }
    };
    //**********************
    // Public functions
    //**********************
    this.setVideoEnabled = function(enabled) {
        self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
    };
    this.startLatencyTest = function(onTestStarted) {
        // Can't start the latency test without a video element.
        if (!self.video) {
            return;
        }

        self.latencyTestTimings.Reset();
        self.latencyTestTimings.TestStartTimeMs = Date.now();
        onTestStarted(self.latencyTestTimings.TestStartTimeMs);
    };
    // This is called when receiving new ICE candidates individually, instead of as part of the offer.
    // It is currently unused but would be called externally from this class.
    this.handleCandidateFromServer = function(iceCandidate) {
        console.log("ICE candidate: ", iceCandidate);
        let candidate = new RTCIceCandidate(iceCandidate);
        self.pcClient.addIceCandidate(candidate).then(_ => {
            console.log('ICE candidate successfully added');
        });
    };
    // Called externally to create an offer for the server.
    this.createOffer = function() {
        if (self.pcClient) {
            console.log("Closing existing PeerConnection");
            self.pcClient.close();
            self.pcClient = null;
        }
        self.pcClient = new RTCPeerConnection(self.cfg);
        setupTracksToSendAsync(self.pcClient).finally(function() {
            setupPeerConnection(self.pcClient);
            self.dcClient = setupDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
            handleCreateOffer(self.pcClient);
        });
    };
    // Called externally when an answer is received from the server.
    this.receiveAnswer = function(answer) {
        console.log('Received answer:');
        console.log(answer);
        var answerDesc = new RTCSessionDescription(answer);
        self.pcClient.setRemoteDescription(answerDesc);

        // Ask the browser to play out frames as soon as possible (a non-standard Chrome hint).
        let receivers = self.pcClient.getReceivers();
        for (let receiver of receivers) {
            receiver.playoutDelayHint = 0;
        }
    };
    this.close = function() {
        if (self.pcClient) {
            console.log("Closing existing peerClient");
            self.pcClient.close();
            self.pcClient = null;
        }
        if (self.aggregateStatsIntervalId)
            clearInterval(self.aggregateStatsIntervalId);
    };
    // Sends data across the datachannel.
    this.send = function(data) {
        if (self.dcClient && self.dcClient.readyState == 'open') {
            //console.log('Sending data on dataconnection', self.dcClient);
            self.dcClient.send(data);
        }
    };
    this.getStats = function(onStats) {
        if (self.pcClient && onStats) {
            self.pcClient.getStats(null).then((stats) => {
                onStats(stats);
            });
        }
    };

    this.aggregateStats = function(checkInterval) {
        let calcAggregatedStats = generateAggregatedStatsFunction();
        let printAggregatedStats = () => { self.getStats(calcAggregatedStats); };
        self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval);
    };
}

return webRtcPlayer;

}));
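// ---------------------------------------------------------------------------
// Minimal usage sketch (illustrative only, not part of the original file).
// It assumes some signalling transport, here a hypothetical WebSocket `ws`
// that relays offers and ICE candidates to the Pixel Streaming signalling
// server; the message shapes below are assumptions, not the server's protocol.
//
//   let player = new webRtcPlayer({ startVideoMuted: true });
//   document.body.appendChild(player.video);
//
//   player.onWebRtcOffer = (offer) => ws.send(JSON.stringify(offer));
//   player.onWebRtcCandidate = (candidate) => ws.send(JSON.stringify({ type: 'iceCandidate', candidate: candidate }));
//   player.onDataChannelMessage = (data) => console.log('UE message', data);
//   player.onAggregatedStats = (stats) => console.log(`bitrate: ${stats.bitrate} kbit/s`);
//
//   player.createOffer();        // kicks off the WebRTC handshake
//   player.aggregateStats(1000); // poll getStats() once per second
//   // ...when the signalling server relays the answer:
//   // player.receiveAnswer(answer);
// ---------------------------------------------------------------------------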