webRtcPlayer.js 23 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538
  1. // Copyright Epic Games, Inc. All Rights Reserved.
  2. // universal module definition - read https://www.davidbcalhoun.com/2014/what-is-amd-commonjs-and-umd/
  3. (function (root, factory) {
  4. if (typeof define === 'function' && define.amd) {
  5. // AMD. Register as an anonymous module.
  6. define(["./adapter"], factory);
  7. } else if (typeof exports === 'object') {
  8. // Node. Does not work with strict CommonJS, but
  9. // only CommonJS-like environments that support module.exports,
  10. // like Node.
  11. module.exports = factory(require("./adapter"));
  12. } else {
  13. // Browser globals (root is window)
  14. root.webRtcPlayer = factory(root.adapter);
  15. }
  16. }(this, function (adapter) {
  17. function webRtcPlayer(parOptions) {
// Options may be omitted entirely; fall back to an empty object.
parOptions = typeof parOptions !== 'undefined' ? parOptions : {};
// Captured so nested callbacks can reach the player instance regardless of `this`.
var self = this;

//**********************
//Config setup
//**********************
// Start from caller-supplied RTCPeerConnection options (e.g. iceServers), if any.
this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
this.cfg.sdpSemantics = 'unified-plan';
// this.cfg.rtcAudioJitterBufferMaxPackets = 10;
// this.cfg.rtcAudioJitterBufferFastAccelerate = true;
// this.cfg.rtcAudioJitterBufferMinDelayMs = 0;
// If this is true in Chrome 89+ SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
// However 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
// Keeping it false preserves compatibility with older Pixel Streaming versions on Chrome 89+.
this.cfg.offerExtmapAllowMixed = false;

//**********************
//Variables
//**********************
this.pcClient = null; // RTCPeerConnection to the streamer
this.dcClient = null; // RTCDataChannel for input/control messages
this.tnClient = null;

this.sdpConstraints = {
    offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
    offerToReceiveVideo: 1,
    voiceActivityDetection: false
};

// See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
this.dataChannelOptions = {ordered: true};

// This is useful if the video/audio needs to autoplay (without user input) as browsers do not allow autoplay non-muted of sound sources without user interaction.
this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;

// To enable mic in browser use SSL/localhost and have ?useMic in the query string.
const urlParams = new URLSearchParams(window.location.search);
this.useMic = urlParams.has('useMic');
if(!this.useMic)
{
    console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
}

// When ?useMic check for SSL or localhost; getUserMedia is blocked on insecure origins.
let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
let isHttpsConnection = location.protocol === 'https:';
if(this.useMic && !isLocalhostConnection && !isHttpsConnection)
{
    this.useMic = false;
    console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
    console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
}
// Latency tester
// Accumulator for one round of latency-test timestamps. Browser-side fields are
// set locally; UE-side fields arrive via SetUETimings. OnAllLatencyTimingsReady
// is a hook the application overrides; it fires when UE timings land and again
// when the frame-display delta is recorded.
this.latencyTestTimings =
{
    TestStartTimeMs: null,        // browser time when the test was started
    UEReceiptTimeMs: null,        // UE-side timings, filled in by SetUETimings
    UEPreCaptureTimeMs: null,
    UEPostCaptureTimeMs: null,
    UEPreEncodeTimeMs: null,
    UEPostEncodeTimeMs: null,
    UETransmissionTimeMs: null,
    BrowserReceiptTimeMs: null,   // browser time when UE timings were received
    FrameDisplayDeltaTimeMs: null,// receive -> composite delta for one frame

    // Clear every timing so a new test run starts from a clean slate.
    Reset: function()
    {
        this.TestStartTimeMs = null;
        this.UEReceiptTimeMs = null;
        this.UEPreCaptureTimeMs = null;
        this.UEPostCaptureTimeMs = null;
        this.UEPreEncodeTimeMs = null;
        this.UEPostEncodeTimeMs = null;
        this.UETransmissionTimeMs = null;
        this.BrowserReceiptTimeMs = null;
        this.FrameDisplayDeltaTimeMs = null;
    },

    // Copy the UE-reported timings in, stamp the browser receipt time, and
    // notify the listener.
    SetUETimings: function(UETimings)
    {
        this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
        this.UEPreCaptureTimeMs = UETimings.PreCaptureTimeMs;
        this.UEPostCaptureTimeMs = UETimings.PostCaptureTimeMs;
        this.UEPreEncodeTimeMs = UETimings.PreEncodeTimeMs;
        this.UEPostEncodeTimeMs = UETimings.PostEncodeTimeMs;
        this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
        this.BrowserReceiptTimeMs = Date.now();
        this.OnAllLatencyTimingsReady(this);
    },

    // Record the receive->display delta once per test (first write wins).
    SetFrameDisplayDeltaTime: function(DeltaTimeMs)
    {
        if(this.FrameDisplayDeltaTimeMs == null)
        {
            this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
            this.OnAllLatencyTimingsReady(this);
        }
    },

    // Overridden by the application to consume completed timings.
    OnAllLatencyTimingsReady: function(Timings){}
}
  109. //**********************
  110. //Functions
  111. //**********************
  112. //Create Video element and expose that as a parameter
  113. this.createWebRtcVideo = function() {
  114. var video = document.createElement('video');
  115. video.id = "streamingVideo";
  116. video.playsInline = true;
  117. video.disablepictureinpicture = true;
  118. // video.muted = self.startVideoMuted;
  119. video.muted = true;
  120. video.addEventListener('loadedmetadata', function(e){
  121. if(self.onVideoInitialised){
  122. self.onVideoInitialised();
  123. }
  124. }, true);
  125. // Check if request video frame callback is supported
  126. if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
  127. // The API is supported!
  128. const onVideoFrameReady = (now, metadata) => {
  129. if(metadata.receiveTime && metadata.expectedDisplayTime)
  130. {
  131. const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
  132. self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
  133. }
  134. // Re-register the callback to be notified about the next frame.
  135. video.requestVideoFrameCallback(onVideoFrameReady);
  136. };
  137. // Initially register the callback to be notified about the first frame.
  138. video.requestVideoFrameCallback(onVideoFrameReady);
  139. }
  140. return video;
  141. }
  142. this.video = this.createWebRtcVideo();
  143. onsignalingstatechange = function(state) {
  144. console.info('signaling state change:', state)
  145. };
  146. oniceconnectionstatechange = function(state) {
  147. console.info('ice connection state change:', state)
  148. };
  149. onicegatheringstatechange = function(state) {
  150. console.info('ice gathering state change:', state)
  151. };
  152. handleOnTrack = function(e) {
  153. console.log('handleOnTrack', e.streams);
  154. if (e.track)
  155. {
  156. console.log('Got track - ' + e.track.kind + ' id=' + e.track.id + ' readyState=' + e.track.readyState);
  157. }
  158. if(e.track.kind == "audio")
  159. {
  160. handleOnAudioTrack(e.streams[0]);
  161. return;
  162. }
  163. else(e.track.kind == "video" && self.video.srcObject !== e.streams[0])
  164. {
  165. self.video.srcObject = e.streams[0];
  166. console.log('Set video source from video track ontrack.');
  167. return;
  168. }
  169. };
  170. handleOnAudioTrack = function(audioMediaStream)
  171. {
  172. // do nothing the video has the same media stream as the audio track we have here (they are linked)
  173. if(self.video.srcObject == audioMediaStream)
  174. {
  175. return;
  176. }
  177. // video element has some other media stream that is not associated with this audio track
  178. else if(self.video.srcObject && self.video.srcObject !== audioMediaStream)
  179. {
  180. // create a new audio element
  181. let audioElem = document.createElement("Audio");
  182. audioElem.srcObject = audioMediaStream;
  183. // there is no way to autoplay audio (even muted), so we defer audio until first click
  184. if(!self.autoPlayAudio) {
  185. let clickToPlayAudio = function() {
  186. audioElem.play();
  187. self.video.removeEventListener("click", clickToPlayAudio);
  188. };
  189. self.video.addEventListener("click", clickToPlayAudio);
  190. }
  191. // we assume the user has clicked somewhere on the page and autoplaying audio will work
  192. else {
  193. audioElem.play();
  194. }
  195. console.log('Created new audio element to play seperate audio stream.');
  196. }
  197. }
  198. setupDataChannel = function(pc, label, options) {
  199. try {
  200. let datachannel = pc.createDataChannel(label, options);
  201. console.log(`Created datachannel (${label})`)
  202. // Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
  203. datachannel.binaryType = "arraybuffer";
  204. datachannel.onopen = function (e) {
  205. console.log(`data channel (${label}) connect`)
  206. if(self.onDataChannelConnected){
  207. self.onDataChannelConnected();
  208. }
  209. }
  210. datachannel.onclose = function (e) {
  211. console.log(`data channel (${label}) closed`)
  212. }
  213. datachannel.onmessage = function (e) {
  214. //console.log(`Got message (${label})`, e.data)
  215. if (self.onDataChannelMessage)
  216. self.onDataChannelMessage(e.data);
  217. }
  218. return datachannel;
  219. } catch (e) {
  220. console.warn('No data channel', e);
  221. return null;
  222. }
  223. }
  224. onicecandidate = function (e) {
  225. console.log('ICE candidate', e)
  226. if (e.candidate && e.candidate.candidate) {
  227. self.onWebRtcCandidate(e.candidate);
  228. }
  229. };
  230. handleCreateOffer = function (pc) {
  231. pc.createOffer(self.sdpConstraints).then(function (offer) {
  232. // Munging is where we modifying the sdp string to set parameters that are not exposed to the browser's WebRTC API
  233. mungeSDPOffer(offer);
  234. // Set our munged SDP on the local peer connection so it is "set" and will be send across
  235. pc.setLocalDescription(offer);
  236. if (self.onWebRtcOffer) {
  237. self.onWebRtcOffer(offer);
  238. }
  239. },
  240. function () { console.warn("Couldn't create offer") });
  241. }
  242. mungeSDPOffer = function (offer) {
  243. // turn off video-timing sdp sent from browser
  244. //offer.sdp = offer.sdp.replace("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", "");
  245. // this indicate we support stereo (Chrome needs this)
  246. offer.sdp = offer.sdp.replace('useinbandfec=1', 'useinbandfec=1;stereo=1;sprop-maxcapturerate=48000');
  247. }
  248. setupPeerConnection = function (pc) {
  249. if (pc.SetBitrate)
  250. console.log("Hurray! there's RTCPeerConnection.SetBitrate function");
  251. //Setup peerConnection events
  252. pc.onsignalingstatechange = onsignalingstatechange;
  253. pc.oniceconnectionstatechange = oniceconnectionstatechange;
  254. pc.onicegatheringstatechange = onicegatheringstatechange;
  255. pc.ontrack = handleOnTrack;
  256. pc.onicecandidate = onicecandidate;
  257. };
  258. generateAggregatedStatsFunction = function(){
  259. if(!self.aggregatedStats)
  260. self.aggregatedStats = {};
  261. return function(stats){
  262. //console.log('Printing Stats');
  263. let newStat = {};
  264. stats.forEach(stat => {
  265. // console.log(JSON.stringify(stat, undefined, 4));
  266. if (stat.type == 'inbound-rtp'
  267. && !stat.isRemote
  268. && (stat.mediaType == 'video' || stat.id.toLowerCase().includes('video'))) {
  269. newStat.timestamp = stat.timestamp;
  270. newStat.bytesReceived = stat.bytesReceived;
  271. newStat.framesDecoded = stat.framesDecoded;
  272. newStat.packetsLost = stat.packetsLost;
  273. newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
  274. newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
  275. newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;
  276. if(self.aggregatedStats && self.aggregatedStats.timestamp){
  277. if(self.aggregatedStats.bytesReceived){
  278. // bitrate = bits received since last time / number of ms since last time
  279. //This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
  280. newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
  281. newStat.bitrate = Math.floor(newStat.bitrate);
  282. newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate
  283. newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate
  284. }
  285. if(self.aggregatedStats.bytesReceivedStart){
  286. newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
  287. newStat.avgBitrate = Math.floor(newStat.avgBitrate);
  288. }
  289. if(self.aggregatedStats.framesDecoded){
  290. // framerate = frames decoded since last time / number of seconds since last time
  291. newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
  292. newStat.framerate = Math.floor(newStat.framerate);
  293. newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate
  294. newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate
  295. }
  296. if(self.aggregatedStats.framesDecodedStart){
  297. newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
  298. newStat.avgframerate = Math.floor(newStat.avgframerate);
  299. }
  300. }
  301. }
  302. //Read video track stats
  303. if(stat.type == 'track' && (stat.trackIdentifier == 'video_label' || stat.kind == 'video')) {
  304. newStat.framesDropped = stat.framesDropped;
  305. newStat.framesReceived = stat.framesReceived;
  306. newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
  307. newStat.frameHeight = stat.frameHeight;
  308. newStat.frameWidth = stat.frameWidth;
  309. newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
  310. newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
  311. }
  312. if(stat.type =='candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0){
  313. newStat.currentRoundTripTime = stat.currentRoundTripTime;
  314. }
  315. });
  316. if(self.aggregatedStats.receiveToCompositeMs)
  317. {
  318. newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
  319. self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
  320. }
  321. self.aggregatedStats = newStat;
  322. if(self.onAggregatedStats)
  323. self.onAggregatedStats(newStat)
  324. }
  325. };
  326. setupTracksToSendAsync = async function(pc){
  327. // Setup a transceiver for getting UE video
  328. pc.addTransceiver("video", { direction: "recvonly" });
  329. // Setup a transceiver for sending mic audio to UE and receiving audio from UE
  330. if(!self.useMic)
  331. {
  332. pc.addTransceiver("audio", { direction: "recvonly" });
  333. }
  334. else
  335. {
  336. let audioSendOptions = self.useMic ?
  337. {
  338. autoGainControl: false,
  339. channelCount: 1,
  340. echoCancellation: false,
  341. latency: 0,
  342. noiseSuppression: false,
  343. sampleRate: 16000,
  344. volume: 1.0
  345. } : false;
  346. // Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
  347. const stream = await navigator.mediaDevices.getUserMedia({video: false, audio: audioSendOptions});
  348. if(stream)
  349. {
  350. for (const track of stream.getTracks()) {
  351. if(track.kind && track.kind == "audio")
  352. {
  353. pc.addTransceiver(track, { direction: "sendrecv" });
  354. }
  355. }
  356. }
  357. else
  358. {
  359. pc.addTransceiver("audio", { direction: "recvonly" });
  360. }
  361. }
  362. };
  363. //**********************
  364. //Public functions
  365. //**********************
  366. this.setVideoEnabled = function(enabled) {
  367. self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
  368. }
  369. this.startLatencyTest = function(onTestStarted) {
  370. // Can't start latency test without a video element
  371. if(!self.video)
  372. {
  373. return;
  374. }
  375. self.latencyTestTimings.Reset();
  376. self.latencyTestTimings.TestStartTimeMs = Date.now();
  377. onTestStarted(self.latencyTestTimings.TestStartTimeMs);
  378. }
  379. //This is called when revceiving new ice candidates individually instead of part of the offer
  380. //This is currently not used but would be called externally from this class
  381. this.handleCandidateFromServer = function(iceCandidate) {
  382. console.log("ICE candidate: ", iceCandidate);
  383. let candidate = new RTCIceCandidate(iceCandidate);
  384. self.pcClient.addIceCandidate(candidate).then(_=>{
  385. console.log('ICE candidate successfully added');
  386. });
  387. };
//Called externaly to create an offer for the server
this.createOffer = function() {
    // Tear down any previous connection before negotiating a new one.
    if(self.pcClient){
        console.log("Closing existing PeerConnection")
        self.pcClient.close();
        self.pcClient = null;
    }
    self.pcClient = new RTCPeerConnection(self.cfg);
    // Transceiver setup may await getUserMedia; `.finally` continues the offer
    // flow whether or not mic capture succeeded, so a denied mic permission
    // still produces a receive-only offer.
    setupTracksToSendAsync(self.pcClient).finally(function()
    {
        setupPeerConnection(self.pcClient);
        self.dcClient = setupDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
        handleCreateOffer(self.pcClient);
    });
};
  403. //Called externaly when an answer is received from the server
  404. this.receiveAnswer = function(answer) {
  405. console.log('Received answer:');
  406. console.log(answer);
  407. var answerDesc = new RTCSessionDescription(answer);
  408. self.pcClient.setRemoteDescription(answerDesc);
  409. let receivers = self.pcClient.getReceivers();
  410. for(let receiver of receivers)
  411. {
  412. receiver.playoutDelayHint = 0;
  413. }
  414. };
  415. this.close = function(){
  416. if(self.pcClient){
  417. console.log("Closing existing peerClient")
  418. self.pcClient.close();
  419. self.pcClient = null;
  420. }
  421. if(self.aggregateStatsIntervalId)
  422. clearInterval(self.aggregateStatsIntervalId);
  423. }
  424. //Sends data across the datachannel
  425. this.send = function(data){
  426. if(self.dcClient && self.dcClient.readyState == 'open'){
  427. //console.log('Sending data on dataconnection', self.dcClient)
  428. self.dcClient.send(data);
  429. }
  430. };
  431. this.getStats = function(onStats){
  432. if(self.pcClient && onStats){
  433. self.pcClient.getStats(null).then((stats) => {
  434. onStats(stats);
  435. });
  436. }
  437. }
  438. this.aggregateStats = function(checkInterval){
  439. let calcAggregatedStats = generateAggregatedStatsFunction();
  440. let printAggregatedStats = () => { self.getStats(calcAggregatedStats); }
  441. self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval);
  442. }
  443. };
  444. return webRtcPlayer;
  445. }));