webRtcPlayer.js 25 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600
  1. // Copyright Epic Games, Inc. All Rights Reserved.
/**
 * webRtcPlayer — browser-side WebRTC peer used by Unreal Pixel Streaming.
 * Builds the RTCPeerConnection configuration from `parOptions` and URL query
 * parameters, creates the <video> element, and exposes offer/answer/ICE and
 * data-channel plumbing as instance methods.
 *
 * @param {Object} [parOptions] - optional settings:
 *   peerConnectionOptions - base RTCConfiguration for the peer connection
 *   startVideoMuted       - start the video element muted
 *   autoPlayAudio         - attempt audio autoplay without a user gesture
 */
function webRtcPlayer(parOptions) {
    parOptions = typeof parOptions !== 'undefined' ? parOptions : {};
    // Captured so nested callbacks can reach the instance regardless of `this`.
    var self = this;
    const urlParams = new URLSearchParams(window.location.search);
    //**********************
    //Config setup
    //**********************
    this.cfg = typeof parOptions.peerConnectionOptions !== 'undefined' ? parOptions.peerConnectionOptions : {};
    this.cfg.sdpSemantics = 'unified-plan';
    // this.cfg.rtcAudioJitterBufferMaxPackets = 10;
    // this.cfg.rtcAudioJitterBufferFastAccelerate = true;
    // this.cfg.rtcAudioJitterBufferMinDelayMs = 0;
    // If this is true in Chrome 89+ SDP is sent that is incompatible with UE Pixel Streaming 4.26 and below.
    // However 4.27 Pixel Streaming does not need this set to false as it supports `offerExtmapAllowMixed`.
    // tl;dr: keep this false for older versions of Pixel Streaming used with Chrome 89+.
    this.cfg.offerExtmapAllowMixed = false;
    // ?ForceTURN restricts ICE to relay candidates only (see handleCandidateFromServer).
    this.forceTURN = urlParams.has('ForceTURN');
    if (this.forceTURN) {
        console.log("Forcing TURN usage by setting ICE Transport Policy in peer connection config.");
        this.cfg.iceTransportPolicy = "relay";
    }
    this.cfg.bundlePolicy = "balanced";
    // ?ForceMaxBundle negotiates a single transport for all media.
    this.forceMaxBundle = urlParams.has('ForceMaxBundle');
    if (this.forceMaxBundle) {
        this.cfg.bundlePolicy = "max-bundle";
    }
    //**********************
    //Variables
    //**********************
    this.pcClient = null;  // RTCPeerConnection
    this.dcClient = null;  // RTCDataChannel
    this.tnClient = null;
    this.sdpConstraints = {
        offerToReceiveAudio: 1, //Note: if you don't need audio you can get improved latency by turning this off.
        offerToReceiveVideo: 1,
        voiceActivityDetection: false
    };
    // See https://www.w3.org/TR/webrtc/#dom-rtcdatachannelinit for values (this is needed for Firefox to be consistent with Chrome.)
    this.dataChannelOptions = { ordered: true };
    // This is useful if the video/audio needs to autoplay (without user input) as browsers do not allow autoplay non-muted of sound sources without user interaction.
    this.startVideoMuted = typeof parOptions.startVideoMuted !== 'undefined' ? parOptions.startVideoMuted : false;
    this.autoPlayAudio = typeof parOptions.autoPlayAudio !== 'undefined' ? parOptions.autoPlayAudio : true;
    // To enable mic in browser use SSL/localhost and have ?useMic in the query string.
    this.useMic = urlParams.has('useMic');
    if (!this.useMic) {
        console.log("Microphone access is not enabled. Pass ?useMic in the url to enable it.");
    }
    // When ?useMic check for SSL or localhost — getUserMedia requires a secure context.
    let isLocalhostConnection = location.hostname === "localhost" || location.hostname === "127.0.0.1";
    let isHttpsConnection = location.protocol === 'https:';
    if (this.useMic && !isLocalhostConnection && !isHttpsConnection) {
        this.useMic = false;
        console.error("Microphone access in the browser will not work if you are not on HTTPS or localhost. Disabling mic access.");
        console.error("For testing you can enable HTTP microphone access Chrome by visiting chrome://flags/ and enabling 'unsafely-treat-insecure-origin-as-secure'");
    }
    // Prefer SFU or P2P connection
    this.preferSFU = urlParams.has('preferSFU');
    console.log(this.preferSFU ?
        "The browser will signal it would prefer an SFU connection. Remove ?preferSFU from the url to signal for P2P usage." :
        "The browser will signal for a P2P connection. Pass ?preferSFU in the url to signal for SFU usage.");
    // Latency tester: accumulates timing points from UE and the browser and
    // fires OnAllLatencyTimingsReady once the full round trip is known.
    this.latencyTestTimings =
    {
        TestStartTimeMs: null,
        UEReceiptTimeMs: null,
        UEEncodeMs: null,
        UECaptureToSendMs: null,
        UETransmissionTimeMs: null,
        BrowserReceiptTimeMs: null,
        FrameDisplayDeltaTimeMs: null,
        Reset: function () {
            this.TestStartTimeMs = null;
            this.UEReceiptTimeMs = null;
            // NOTE(review): the trailing commas below are comma operators, not
            // separators — harmless here, but they should be semicolons.
            this.UEEncodeMs = null,
            this.UECaptureToSendMs = null,
            this.UETransmissionTimeMs = null;
            this.BrowserReceiptTimeMs = null;
            this.FrameDisplayDeltaTimeMs = null;
        },
        // Record the timing block sent back by UE and stamp browser receipt time.
        SetUETimings: function (UETimings) {
            this.UEReceiptTimeMs = UETimings.ReceiptTimeMs;
            this.UEEncodeMs = UETimings.EncodeMs,
            this.UECaptureToSendMs = UETimings.CaptureToSendMs,
            this.UETransmissionTimeMs = UETimings.TransmissionTimeMs;
            this.BrowserReceiptTimeMs = Date.now();
            this.OnAllLatencyTimingsReady(this);
        },
        // Only the first delta after a Reset() is kept (== null also matches undefined).
        SetFrameDisplayDeltaTime: function (DeltaTimeMs) {
            if (this.FrameDisplayDeltaTimeMs == null) {
                this.FrameDisplayDeltaTimeMs = Math.round(DeltaTimeMs);
                this.OnAllLatencyTimingsReady(this);
            }
        },
        // Overridable hook; default is a no-op.
        OnAllLatencyTimingsReady: function (Timings) { }
    }
  97. //**********************
  98. //Functions
  99. //**********************
  100. //Create Video element and expose that as a parameter
  101. this.createWebRtcVideo = function () {
  102. var video = document.createElement('video');
  103. video.id = "streamingVideo";
  104. video.playsInline = true;
  105. video.disablepictureinpicture = true;
  106. // video.muted = self.startVideoMuted;
  107. // 音频
  108. video.muted = true;
  109. // 开启全屏
  110. video.style.width = "100%"
  111. video.style.height = "100%"
  112. video.style.objectFit = "fill"
  113. video.style.margin = 0;
  114. video.style.padding = 0;
  115. video.style.top = 0;
  116. video.style.left = 0;
  117. video.style.position = "relative";
  118. video.style.zIndex = 100;
  119. video.style.cursor = "pointer";
  120. // video.style.overflow="hidden";
  121. video.addEventListener('loadedmetadata', function (e) {
  122. if (self.onVideoInitialised) {
  123. self.onVideoInitialised();
  124. }
  125. }, true);
  126. // Check if request video frame callback is supported
  127. if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
  128. // The API is supported!
  129. const onVideoFrameReady = (now, metadata) => {
  130. if (metadata.receiveTime && metadata.expectedDisplayTime) {
  131. const receiveToCompositeMs = metadata.presentationTime - metadata.receiveTime;
  132. self.aggregatedStats.receiveToCompositeMs = receiveToCompositeMs;
  133. }
  134. // Re-register the callback to be notified about the next frame.
  135. video.requestVideoFrameCallback(onVideoFrameReady);
  136. };
  137. // Initially register the callback to be notified about the first frame.
  138. video.requestVideoFrameCallback(onVideoFrameReady);
  139. }
  140. return video;
  141. }
  142. this.video = this.createWebRtcVideo();
  143. this.availableVideoStreams = new Map();
  144. function onsignalingstatechange(state) {
  145. console.info('Signaling state change. |', state.srcElement.signalingState, "|")
  146. };
  147. function oniceconnectionstatechange(state) {
  148. console.info('Browser ICE connection |', state.srcElement.iceConnectionState, '|')
  149. };
  150. function onicegatheringstatechange(state) {
  151. console.info('Browser ICE gathering |', state.srcElement.iceGatheringState, '|')
  152. };
  153. function handleOnTrack(e) {
  154. if (e.track) {
  155. console.log('Got track. | Kind=' + e.track.kind + ' | Id=' + e.track.id + ' | readyState=' + e.track.readyState + ' |');
  156. }
  157. if (e.track.kind == "audio") {
  158. handleOnAudioTrack(e.streams[0]);
  159. return;
  160. }
  161. else (e.track.kind == "video")
  162. {
  163. for (const s of e.streams) {
  164. if (!self.availableVideoStreams.has(s.id)) {
  165. self.availableVideoStreams.set(s.id, s);
  166. }
  167. }
  168. self.video.srcObject = e.streams[0];
  169. // All tracks are added "muted" by WebRTC/browser and become unmuted when media is being sent
  170. e.track.onunmute = () => {
  171. self.video.srcObject = e.streams[0];
  172. self.onNewVideoTrack(e.streams);
  173. }
  174. }
  175. };
  176. function handleOnAudioTrack(audioMediaStream) {
  177. // do nothing the video has the same media stream as the audio track we have here (they are linked)
  178. if (self.video.srcObject == audioMediaStream) {
  179. return;
  180. }
  181. // video element has some other media stream that is not associated with this audio track
  182. else if (self.video.srcObject && self.video.srcObject !== audioMediaStream) {
  183. // create a new audio element
  184. let audioElem = document.createElement("Audio");
  185. audioElem.srcObject = audioMediaStream;
  186. // there is no way to autoplay audio (even muted), so we defer audio until first click
  187. if (!self.autoPlayAudio) {
  188. let clickToPlayAudio = function () {
  189. audioElem.play();
  190. self.video.removeEventListener("click", clickToPlayAudio);
  191. };
  192. self.video.addEventListener("click", clickToPlayAudio);
  193. }
  194. // we assume the user has clicked somewhere on the page and autoplaying audio will work
  195. else {
  196. audioElem.play();
  197. }
  198. console.log('Created new audio element to play seperate audio stream.');
  199. }
  200. }
  201. function onDataChannel(dataChannelEvent) {
  202. // This is the primary data channel code path when we are "receiving"
  203. console.log("Data channel created for us by browser as we are a receiving peer.");
  204. self.dcClient = dataChannelEvent.channel;
  205. setupDataChannelCallbacks(self.dcClient);
  206. }
  207. function createDataChannel(pc, label, options) {
  208. // This is the primary data channel code path when we are "offering"
  209. let datachannel = pc.createDataChannel(label, options);
  210. console.log(`Created datachannel (${label})`);
  211. setupDataChannelCallbacks(datachannel);
  212. return datachannel;
  213. }
  214. function setupDataChannelCallbacks(datachannel) {
  215. try {
  216. // Inform browser we would like binary data as an ArrayBuffer (FF chooses Blob by default!)
  217. datachannel.binaryType = "arraybuffer";
  218. datachannel.onopen = function (e) {
  219. console.log("Data channel connected");
  220. if (self.onDataChannelConnected) {
  221. self.onDataChannelConnected();
  222. }
  223. }
  224. datachannel.onclose = function (e) {
  225. console.log("Data channel connected", e);
  226. }
  227. datachannel.onmessage = function (e) {
  228. if (self.onDataChannelMessage) {
  229. self.onDataChannelMessage(e.data);
  230. }
  231. }
  232. datachannel.onerror = function (e) {
  233. console.error("Data channel error", e);
  234. }
  235. return datachannel;
  236. } catch (e) {
  237. console.warn('No data channel', e);
  238. return null;
  239. }
  240. }
  241. function onicecandidate(e) {
  242. let candidate = e.candidate;
  243. if (candidate && candidate.candidate) {
  244. console.log("%c[Browser ICE candidate]", "background: violet; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
  245. self.onWebRtcCandidate(candidate);
  246. }
  247. };
  248. function handleCreateOffer(pc) {
  249. pc.createOffer(self.sdpConstraints).then(function (offer) {
  250. // Munging is where we modifying the sdp string to set parameters that are not exposed to the browser's WebRTC API
  251. mungeSDPOffer(offer);
  252. // Set our munged SDP on the local peer connection so it is "set" and will be send across
  253. pc.setLocalDescription(offer);
  254. if (self.onWebRtcOffer) {
  255. self.onWebRtcOffer(offer);
  256. }
  257. },
  258. function () { console.warn("Couldn't create offer") });
  259. }
  260. function mungeSDPOffer(offer) {
  261. // turn off video-timing sdp sent from browser
  262. //offer.sdp = offer.sdp.replace("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", "");
  263. // this indicate we support stereo (Chrome needs this)
  264. offer.sdp = offer.sdp.replace('useinbandfec=1', 'useinbandfec=1;stereo=1;sprop-maxcapturerate=48000');
  265. }
  266. function setupPeerConnection(pc) {
  267. //Setup peerConnection events
  268. pc.onsignalingstatechange = onsignalingstatechange;
  269. pc.oniceconnectionstatechange = oniceconnectionstatechange;
  270. pc.onicegatheringstatechange = onicegatheringstatechange;
  271. pc.ontrack = handleOnTrack;
  272. pc.onicecandidate = onicecandidate;
  273. pc.ondatachannel = onDataChannel;
  274. };
    // Build a closure that folds one raw RTCStatsReport into self.aggregatedStats.
    // Rates (bitrate/framerate) are derived from the deltas between the current
    // report and the previous poll stored in self.aggregatedStats.
    function generateAggregatedStatsFunction() {
        if (!self.aggregatedStats)
            self.aggregatedStats = {};
        return function (stats) {
            //console.log('Printing Stats');
            let newStat = {};
            stats.forEach(stat => {
                // console.log(JSON.stringify(stat, undefined, 4));
                // Inbound video RTP stream: raw counters plus derived rates.
                if (stat.type == 'inbound-rtp'
                    && !stat.isRemote
                    && (stat.mediaType == 'video' || stat.id.toLowerCase().includes('video'))) {
                    newStat.timestamp = stat.timestamp;
                    newStat.bytesReceived = stat.bytesReceived;
                    newStat.framesDecoded = stat.framesDecoded;
                    newStat.packetsLost = stat.packetsLost;
                    // The *Start values are captured on the first poll and carried
                    // forward so averages can be computed over the whole session.
                    newStat.bytesReceivedStart = self.aggregatedStats && self.aggregatedStats.bytesReceivedStart ? self.aggregatedStats.bytesReceivedStart : stat.bytesReceived;
                    newStat.framesDecodedStart = self.aggregatedStats && self.aggregatedStats.framesDecodedStart ? self.aggregatedStats.framesDecodedStart : stat.framesDecoded;
                    newStat.timestampStart = self.aggregatedStats && self.aggregatedStats.timestampStart ? self.aggregatedStats.timestampStart : stat.timestamp;
                    // Deltas require at least one previous poll.
                    if (self.aggregatedStats && self.aggregatedStats.timestamp) {
                        if (self.aggregatedStats.bytesReceived) {
                            // bitrate = bits received since last time / number of ms since last time
                            //This is automatically in kbits (where k=1000) since time is in ms and stat we want is in seconds (so a '* 1000' then a '/ 1000' would negate each other)
                            newStat.bitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceived) / (newStat.timestamp - self.aggregatedStats.timestamp);
                            newStat.bitrate = Math.floor(newStat.bitrate);
                            // Running min/max of the instantaneous bitrate.
                            newStat.lowBitrate = self.aggregatedStats.lowBitrate && self.aggregatedStats.lowBitrate < newStat.bitrate ? self.aggregatedStats.lowBitrate : newStat.bitrate
                            newStat.highBitrate = self.aggregatedStats.highBitrate && self.aggregatedStats.highBitrate > newStat.bitrate ? self.aggregatedStats.highBitrate : newStat.bitrate
                        }
                        if (self.aggregatedStats.bytesReceivedStart) {
                            // Session-wide average bitrate from the first poll onwards.
                            newStat.avgBitrate = 8 * (newStat.bytesReceived - self.aggregatedStats.bytesReceivedStart) / (newStat.timestamp - self.aggregatedStats.timestampStart);
                            newStat.avgBitrate = Math.floor(newStat.avgBitrate);
                        }
                        if (self.aggregatedStats.framesDecoded) {
                            // framerate = frames decoded since last time / number of seconds since last time
                            newStat.framerate = (newStat.framesDecoded - self.aggregatedStats.framesDecoded) / ((newStat.timestamp - self.aggregatedStats.timestamp) / 1000);
                            newStat.framerate = Math.floor(newStat.framerate);
                            // Running min/max of the instantaneous framerate.
                            newStat.lowFramerate = self.aggregatedStats.lowFramerate && self.aggregatedStats.lowFramerate < newStat.framerate ? self.aggregatedStats.lowFramerate : newStat.framerate
                            newStat.highFramerate = self.aggregatedStats.highFramerate && self.aggregatedStats.highFramerate > newStat.framerate ? self.aggregatedStats.highFramerate : newStat.framerate
                        }
                        if (self.aggregatedStats.framesDecodedStart) {
                            // Session-wide average framerate from the first poll onwards.
                            newStat.avgframerate = (newStat.framesDecoded - self.aggregatedStats.framesDecodedStart) / ((newStat.timestamp - self.aggregatedStats.timestampStart) / 1000);
                            newStat.avgframerate = Math.floor(newStat.avgframerate);
                        }
                    }
                }
                //Read video track stats
                if (stat.type == 'track' && (stat.trackIdentifier == 'video_label' || stat.kind == 'video')) {
                    newStat.framesDropped = stat.framesDropped;
                    newStat.framesReceived = stat.framesReceived;
                    newStat.framesDroppedPercentage = stat.framesDropped / stat.framesReceived * 100;
                    newStat.frameHeight = stat.frameHeight;
                    newStat.frameWidth = stat.frameWidth;
                    newStat.frameHeightStart = self.aggregatedStats && self.aggregatedStats.frameHeightStart ? self.aggregatedStats.frameHeightStart : stat.frameHeight;
                    newStat.frameWidthStart = self.aggregatedStats && self.aggregatedStats.frameWidthStart ? self.aggregatedStats.frameWidthStart : stat.frameWidth;
                }
                // Active candidate pair carries the round-trip-time estimate.
                if (stat.type == 'candidate-pair' && stat.hasOwnProperty('currentRoundTripTime') && stat.currentRoundTripTime != 0) {
                    newStat.currentRoundTripTime = stat.currentRoundTripTime;
                }
            });
            // Carry over the display latency measured by requestVideoFrameCallback
            // and feed it into the latency tester.
            if (self.aggregatedStats.receiveToCompositeMs) {
                newStat.receiveToCompositeMs = self.aggregatedStats.receiveToCompositeMs;
                self.latencyTestTimings.SetFrameDisplayDeltaTime(self.aggregatedStats.receiveToCompositeMs);
            }
            self.aggregatedStats = newStat;
            if (self.onAggregatedStats)
                self.onAggregatedStats(newStat)
        }
    };
  342. let setupTransceiversAsync = async function (pc) {
  343. let hasTransceivers = pc.getTransceivers().length > 0;
  344. // Setup a transceiver for getting UE video
  345. pc.addTransceiver("video", { direction: "recvonly" });
  346. // Setup a transceiver for sending mic audio to UE and receiving audio from UE
  347. if (!self.useMic) {
  348. pc.addTransceiver("audio", { direction: "recvonly" });
  349. }
  350. else {
  351. let audioSendOptions = self.useMic ?
  352. {
  353. autoGainControl: false,
  354. channelCount: 1,
  355. echoCancellation: false,
  356. latency: 0,
  357. noiseSuppression: false,
  358. sampleRate: 48000,
  359. volume: 1.0
  360. } : false;
  361. // Note using mic on android chrome requires SSL or chrome://flags/ "unsafely-treat-insecure-origin-as-secure"
  362. const stream = await navigator.mediaDevices.getUserMedia({ video: false, audio: audioSendOptions });
  363. if (stream) {
  364. if (hasTransceivers) {
  365. for (let transceiver of pc.getTransceivers()) {
  366. if (transceiver && transceiver.receiver && transceiver.receiver.track && transceiver.receiver.track.kind === "audio") {
  367. for (const track of stream.getTracks()) {
  368. if (track.kind && track.kind == "audio") {
  369. transceiver.sender.replaceTrack(track);
  370. transceiver.direction = "sendrecv";
  371. }
  372. }
  373. }
  374. }
  375. }
  376. else {
  377. for (const track of stream.getTracks()) {
  378. if (track.kind && track.kind == "audio") {
  379. pc.addTransceiver(track, { direction: "sendrecv" });
  380. }
  381. }
  382. }
  383. }
  384. else {
  385. pc.addTransceiver("audio", { direction: "recvonly" });
  386. }
  387. }
  388. };
  389. //**********************
  390. //Public functions
  391. //**********************
  392. this.setVideoEnabled = function (enabled) {
  393. self.video.srcObject.getTracks().forEach(track => track.enabled = enabled);
  394. }
  395. this.startLatencyTest = function (onTestStarted) {
  396. // Can't start latency test without a video element
  397. if (!self.video) {
  398. return;
  399. }
  400. self.latencyTestTimings.Reset();
  401. self.latencyTestTimings.TestStartTimeMs = Date.now();
  402. onTestStarted(self.latencyTestTimings.TestStartTimeMs);
  403. }
  404. //This is called when revceiving new ice candidates individually instead of part of the offer
  405. this.handleCandidateFromServer = function (iceCandidate) {
  406. let candidate = new RTCIceCandidate(iceCandidate);
  407. console.log("%c[Unreal ICE candidate]", "background: pink; color: black", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|");
  408. // if forcing TURN, reject any candidates not relay
  409. if (self.forceTURN) {
  410. // check if no relay address is found, if so, we are assuming it means no TURN server
  411. if (candidate.candidate.indexOf("relay") < 0) {
  412. console.warn("Dropping candidate because it was not TURN relay.", "| Type=", candidate.type, "| Protocol=", candidate.protocol, "| Address=", candidate.address, "| Port=", candidate.port, "|")
  413. return;
  414. }
  415. }
  416. self.pcClient.addIceCandidate(candidate).catch(function (e) {
  417. console.error("Failed to add ICE candidate", e);
  418. });
  419. };
  420. //Called externaly to create an offer for the server
  421. this.createOffer = function () {
  422. if (self.pcClient) {
  423. console.log("Closing existing PeerConnection")
  424. self.pcClient.close();
  425. self.pcClient = null;
  426. }
  427. self.pcClient = new RTCPeerConnection(self.cfg);
  428. setupPeerConnection(self.pcClient);
  429. setupTransceiversAsync(self.pcClient).finally(function () {
  430. self.dcClient = createDataChannel(self.pcClient, 'cirrus', self.dataChannelOptions);
  431. handleCreateOffer(self.pcClient);
  432. });
  433. };
  434. //Called externaly when an offer is received from the server
  435. this.receiveOffer = function (offer) {
  436. var offerDesc = new RTCSessionDescription(offer);
  437. if (!self.pcClient) {
  438. console.log("Creating a new PeerConnection in the browser.")
  439. self.pcClient = new RTCPeerConnection(self.cfg);
  440. setupPeerConnection(self.pcClient);
  441. // Put things here that happen post transceiver setup
  442. self.pcClient.setRemoteDescription(offerDesc)
  443. .then(() => {
  444. setupTransceiversAsync(self.pcClient).finally(function () {
  445. self.pcClient.createAnswer()
  446. .then(answer => self.pcClient.setLocalDescription(answer))
  447. .then(() => {
  448. if (self.onWebRtcAnswer) {
  449. self.onWebRtcAnswer(self.pcClient.currentLocalDescription);
  450. }
  451. })
  452. .then(() => {
  453. let receivers = self.pcClient.getReceivers();
  454. for (let receiver of receivers) {
  455. receiver.playoutDelayHint = 0;
  456. }
  457. })
  458. .catch((error) => console.error("createAnswer() failed:", error));
  459. });
  460. });
  461. }
  462. };
  463. //Called externaly when an answer is received from the server
  464. this.receiveAnswer = function (answer) {
  465. var answerDesc = new RTCSessionDescription(answer);
  466. self.pcClient.setRemoteDescription(answerDesc);
  467. let receivers = self.pcClient.getReceivers();
  468. for (let receiver of receivers) {
  469. receiver.playoutDelayHint = 0;
  470. }
  471. };
  472. this.close = function () {
  473. if (self.pcClient) {
  474. console.log("Closing existing peerClient")
  475. self.pcClient.close();
  476. self.pcClient = null;
  477. }
  478. if (self.aggregateStatsIntervalId)
  479. clearInterval(self.aggregateStatsIntervalId);
  480. }
  481. //Sends data across the datachannel
  482. this.send = function (data) {
  483. if (self.dcClient && self.dcClient.readyState == 'open') {
  484. //console.log('Sending data on dataconnection', self.dcClient)
  485. self.dcClient.send(data);
  486. }
  487. };
  488. this.getStats = function (onStats) {
  489. if (self.pcClient && onStats) {
  490. self.pcClient.getStats(null).then((stats) => {
  491. onStats(stats);
  492. });
  493. }
  494. }
  495. this.aggregateStats = function (checkInterval) {
  496. let calcAggregatedStats = generateAggregatedStatsFunction();
  497. let printAggregatedStats = () => { self.getStats(calcAggregatedStats); }
  498. self.aggregateStatsIntervalId = setInterval(printAggregatedStats, checkInterval);
  499. }
  500. }
  501. export default webRtcPlayer