diff --git a/webrtc/RTCPeerConnection-helper.js b/webrtc/RTCPeerConnection-helper.js
index 24b3db7a0880fc..8a11a1ca1e9a00 100644
--- a/webrtc/RTCPeerConnection-helper.js
+++ b/webrtc/RTCPeerConnection-helper.js
@@ -582,7 +582,7 @@ const trackFactories = {
     return dst.stream.getAudioTracks()[0];
   },
 
-  video({width = 640, height = 480} = {}) {
+  video({width = 640, height = 480, signal = null} = {}) {
     const canvas = Object.assign(
       document.createElement("canvas"), {width, height}
     );
@@ -593,8 +593,13 @@
     let count = 0;
     setInterval(() => {
       ctx.fillStyle = `rgb(${count%255}, ${count*count%255}, ${count%255})`;
       count += 1;
       ctx.fillRect(0, 0, width, height);
+      // If signal is set, add a constant-color box to the video frame.
+      if (signal !== null) {
+        ctx.fillStyle = `rgb(${signal}, ${signal}, ${signal})`;
+        ctx.fillRect(10, 10, 20, 20);
+      }
     }, 100);
 
     if (document.body) {
@@ -609,13 +614,40 @@
   }
 };
 
+// Get the signal from a video element playing a track from getNoiseStream
+function getVideoSignal(v) {
+  if (v.videoWidth < 21 || v.videoHeight < 21) {
+    return null;
+  }
+  const canvas = new OffscreenCanvas(v.videoWidth, v.videoHeight);
+  let context = canvas.getContext('2d');
+  context.drawImage(v, 0, 0, v.videoWidth, v.videoHeight);
+  // Extract pixel value at position 20, 20 (inside the signal box)
+  let pixel = context.getImageData(20, 20, 1, 1);
+  return (pixel.data[0] + pixel.data[1] + pixel.data[2]) / 3;
+}
+
+function detectSignal(t, v, value) {
+  return new Promise((resolve) => {
+    const check = () => {
+      const signal = getVideoSignal(v);
+      if (signal !== null && signal < value + 1 && signal > value - 1) {
+        resolve();
+      } else {
+        t.step_timeout(check, 100);
+      }
+    };
+    check();
+  });
+}
+
 // Generate a MediaStream bearing the specified tracks.
 //
 // @param {object} [caps]
 // @param {boolean} [caps.audio] - flag indicating whether the generated stream
 //   should include an audio track
 // @param {boolean} [caps.video] - flag indicating whether the generated stream
-//   should include a video track
+//   should include a video track, or parameters for video
 async function getNoiseStream(caps = {}) {
   if (!trackFactories.canCreate(caps)) {
     return navigator.mediaDevices.getUserMedia(caps);
@@ -627,7 +659,7 @@
   }
 
   if (caps.video) {
-    tracks.push(trackFactories.video());
+    tracks.push(trackFactories.video(caps.video));
   }
 
   return new MediaStream(tracks);
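
For orientation, here is a minimal standalone sketch (not part of the patch; SIGNAL, canvas, and ctx are illustrative names) of the round trip these helpers rely on: the track factory paints a constant-color 20x20 box at (10, 10), and getVideoSignal() samples pixel (20, 20), which lies inside that box, then averages its RGB channels.

// Illustrative sketch of the signal round trip; runs in any browser page.
// SIGNAL, canvas, and ctx are hypothetical names, not from the patch.
const SIGNAL = 20;
const canvas = Object.assign(document.createElement('canvas'),
                             {width: 640, height: 480});
const ctx = canvas.getContext('2d');

// Writer side (trackFactories.video): constant-color box covering
// (10, 10) to (30, 30).
ctx.fillStyle = `rgb(${SIGNAL}, ${SIGNAL}, ${SIGNAL})`;
ctx.fillRect(10, 10, 20, 20);

// Reader side (getVideoSignal): pixel (20, 20) falls inside the box, so the
// channel average recovers SIGNAL. detectSignal() accepts a ±1 tolerance
// because video encoding over a real connection is lossy.
const pixel = ctx.getImageData(20, 20, 1, 1);
const recovered = (pixel.data[0] + pixel.data[1] + pixel.data[2]) / 3;
console.assert(Math.abs(recovered - SIGNAL) < 1, `got ${recovered}`);
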
diff --git a/webrtc/RTCRtpSender-replaceTrack.https.html b/webrtc/RTCRtpSender-replaceTrack.https.html
index a2c790b144e2de..671e86fd4e893a 100644
--- a/webrtc/RTCRtpSender-replaceTrack.https.html
+++ b/webrtc/RTCRtpSender-replaceTrack.https.html
@@ -272,5 +272,35 @@
       without negotiating.
   3.  Queue a task that runs the following steps:
       1.  If connection's [[isClosed]] slot is true, abort these steps.
-  */
+  */
+
+promise_test(async t => {
+  const v = document.createElement('video');
+  v.autoplay = true;
+  const pc1 = new RTCPeerConnection();
+  t.add_cleanup(() => pc1.close());
+  const pc2 = new RTCPeerConnection();
+  t.add_cleanup(() => pc2.close());
+  const stream1 = await getNoiseStream({video: {signal: 20}});
+  t.add_cleanup(() => stream1.getTracks().forEach(track => track.stop()));
+  const [track1] = stream1.getTracks();
+  const stream2 = await getNoiseStream({video: {signal: 250}});
+  t.add_cleanup(() => stream2.getTracks().forEach(track => track.stop()));
+  const [track2] = stream2.getTracks();
+  const sender = pc1.addTrack(track1);
+  pc2.ontrack = (e) => {
+    v.srcObject = new MediaStream([e.track]);
+  };
+  const metadataToBeLoaded = new Promise((resolve) => {
+    v.addEventListener('loadedmetadata', () => {
+      resolve();
+    });
+  });
+  exchangeIceCandidates(pc1, pc2);
+  await doSignalingHandshake(pc1, pc2);
+  await metadataToBeLoaded;
+  await detectSignal(t, v, 20);
+  await sender.replaceTrack(track2);
+  await detectSignal(t, v, 250);
+}, 'ReplaceTrack transmits the new track not the old track');
 </script>
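
Usage note: detectSignal() polls the decoded frames until the expected value appears, so the test tolerates however long replaceTrack() takes to propagate through the media pipeline. A natural follow-on assertion (hypothetical, not part of this patch) would swap back and confirm the original signal reappears:

// Hypothetical extension, reusing sender, t, v, and track1 from the test above.
await sender.replaceTrack(track1);  // switch back to the 20-signal track
await detectSignal(t, v, 20);       // receiver shows the original signal again
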