diff --git a/samples/encode-decode-worker/js/main.js b/samples/encode-decode-worker/js/main.js index e399abbf..8bfa3b0f 100644 --- a/samples/encode-decode-worker/js/main.js +++ b/samples/encode-decode-worker/js/main.js @@ -1,392 +1,403 @@ -'use strict'; - -let preferredResolution; -let mediaStream, bitrate = 100000; -let stopped = false; -let preferredCodec ="VP8"; -let mode = "L1T3"; -let latencyPref = "realtime", bitPref = "variable"; -let encHw = "no-preference", decHw = "no-preference"; -let streamWorker; -let inputStream, outputStream; -let metrics = { - all: [], -}; -let e2e = { - all: [], -}; - -const rate = document.querySelector('#rate'); -const connectButton = document.querySelector('#connect'); -const stopButton = document.querySelector('#stop'); -const codecButtons = document.querySelector('#codecButtons'); -const resButtons = document.querySelector('#resButtons'); -const modeButtons = document.querySelector('#modeButtons'); -const decHwButtons = document.querySelector('#decHwButtons'); -const encHwButtons = document.querySelector('#encHwButtons'); -const chart2_div = document.getElementById('chart2_div'); -const videoSelect = document.querySelector('select#videoSource'); -const selectors = [videoSelect]; -chart2_div.style.display = "none"; -connectButton.disabled = false; -stopButton.disabled = true; - -videoSelect.onchange = function () { - videoSource = videoSelect.value; -}; - -const qvgaConstraints = {video: {width: 320, height: 240}}; -const vgaConstraints = {video: {width: 640, height: 480}}; -const hdConstraints = {video: {width: 1280, height: 720}}; -const fullHdConstraints = {video: {width: {min: 1920}, height: {min: 1080}}}; -const tv4KConstraints = {video: {width: {exact: 3840}, height: {exact: 2160}}}; -const cinema4KConstraints = {video: {width: {exact: 4096}, height: {exact: 2160}}}; -const eightKConstraints = {video: {width: {min: 7680}, height: {min: 4320}}}; - -let constraints = qvgaConstraints; - -function metrics_update(data) { - 
metrics.all.push(data); -} - -function metrics_report() { - metrics.all.sort((a, b) => { - return (100000 * (a.mediaTime - b.mediaTime) + a.output - b.output); - }); - const len = metrics.all.length; - let j = 0; - for (let i = 0; i < len ; i++ ) { - if (metrics.all[i].output == 1) { - const frameno = metrics.all[i].presentedFrames; - const g2g = metrics.all[i].expectedDisplayTime - metrics.all[i-1].captureTime; - const mediaTime = metrics.all[i].mediaTime; - const captureTime = metrics.all[i-1].captureTime; - const expectedDisplayTime = metrics.all[i].expectedDisplayTime; - const delay = metrics.all[i].expectedDisplayTime - metrics.all[i-1].expectedDisplayTime; - const data = [frameno, g2g]; - e2e.all.push(data); - } - } - // addToEventLog('Data dump: ' + JSON.stringify(e2e.all)); - return { - count: e2e.all.length - }; -} - -function addToEventLog(text, severity = 'info') { - let log = document.querySelector('textarea'); - log.value += 'log-' + severity + ': ' + text + '\n'; - if (severity == 'fatal') stop(); -} - -function gotDevices(deviceInfos) { - // Handles being called several times to update labels. Preserve values. 
- const values = selectors.map(select => select.value); - selectors.forEach(select => { - while (select.firstChild) { - select.removeChild(select.firstChild); - } - }); - for (let i = 0; i !== deviceInfos.length; ++i) { - const deviceInfo = deviceInfos[i]; - const option = document.createElement('option'); - option.value = deviceInfo.deviceId; - if (deviceInfo.kind === 'videoinput') { - option.text = deviceInfo.label || `camera ${videoSelect.length + 1}`; - videoSelect.appendChild(option); - } - } - selectors.forEach((select, selectorIndex) => { - if (Array.prototype.slice.call(select.childNodes).some(n => n.value === values[selectorIndex])) { - select.value = values[selectorIndex]; - } - }); -} - -async function getResValue(radio) { - preferredResolution = radio.value; - addToEventLog('Resolution selected: ' + preferredResolution); - switch(preferredResolution) { - case "qvga": - constraints = qvgaConstraints; - break; - case "vga": - constraints = vgaConstraints; - break; - case "hd": - constraints = hdConstraints; - break; - case "full-hd": - constraints = fullHdConstraints; - break; - case "tv4K": - constraints = tv4KConstraints; - break; - case "cinema4K": - constraints = cinema4KConstraints; - break; - case "eightK": - constraints = eightKConstraints; - break; - default: - constraints = qvgaConstraints; - break; - } - // Get a MediaStream from the webcam, and reset the resolution. - try { - //stop the tracks - if (mediaStream){ - mediaStream.getTracks().forEach(track => { - track.stop(); - }); - } - gotDevices(await navigator.mediaDevices.enumerateDevices()); - constraints.deviceId = videoSource ? 
{exact: videoSource} : undefined; - mediaStream = await navigator.mediaDevices.getUserMedia(constraints); - document.getElementById('inputVideo').srcObject = mediaStream; - } catch(e){ - addToEventLog(`EnumerateDevices or gUM error: ${e.message}`); - } -} - -function getPrefValue(radio) { - latencyPref = radio.value; - addToEventLog('Latency preference selected: ' + latencyPref); -} - -function getBitPrefValue(radio) { - bitPref = radio.value; - addToEventLog('Bitrate mode selected: ' + bitPref); -} - -function getCodecValue(radio) { - preferredCodec = radio.value; - addToEventLog('Codec selected: ' + preferredCodec); -} - -function getModeValue(radio) { - mode = radio.value; - addToEventLog('Mode selected: ' + mode); -} - -function getDecHwValue(radio) { - decHw = radio.value; - addToEventLog('Decoder Hardware Acceleration preference: ' + decHw); -} - -function getEncHwValue(radio) { - encHw = radio.value; - addToEventLog('Encoder Hardware Acceleration preference: ' + encHw); -} - -function stop() { - stopped = true; - stopButton.disabled = true; - connectButton.disabled = true; - chart2_div.style.display = "initial"; - streamWorker.postMessage({ type: "stop" }); - try { - inputStream.cancel(); - addToEventLog('inputStream cancelled'); - } catch(e) { - addToEventLog(`Could not cancel inputStream: ${e.message}`); - } - try { - outputStream.abort(); - addToEventLog('outputStream aborted'); - } catch(e) { - addToEventLog(`Could not abort outputStream: ${e.message}`); - } -} - -document.addEventListener('DOMContentLoaded', async function(event) { - if (stopped) return; - addToEventLog('DOM Content Loaded'); - - // Need to support standard mediacapture-transform implementations - - if (typeof MediaStreamTrackProcessor === 'undefined' || - typeof MediaStreamTrackGenerator === 'undefined') { - addToEventLog('Your browser does not support the MSTP and MSTG APIs.', 'fatal'); - return; - } - - try { - gotDevices(await navigator.mediaDevices.enumerateDevices()); - } catch 
(e) { - addToEventLog('Error in Device enumeration'); - } - constraints.deviceId = videoSource ? {exact: videoSource} : undefined; - // Get a MediaStream from the webcam. - mediaStream = await navigator.mediaDevices.getUserMedia(constraints); - // Connect the webcam stream to the video element. - document.getElementById('inputVideo').srcObject = mediaStream; - // Create a new worker. - streamWorker = new Worker("js/stream_worker.js"); - addToEventLog('Worker created.'); - - streamWorker.addEventListener('message', function(e) { - if (e.data.severity != 'chart'){ - addToEventLog('Worker msg: ' + e.data.text, e.data.severity); - } else { - // draw the glass-glass latency chart - metrics_report(); - const x = e2e.all.map(item => item[0]); - const y = e2e.all.map(item => item[1]); - Plotly.newPlot(chart2_div, [{ - x, - y, - mode: 'markers', - type: 'scatter', - }], { - xaxis: { - title: 'Frame number', - autorange: true, - }, - yaxis: { - title: 'Glass-Glass-Latency (ms)', - autorange: true, - }, - title: 'Glass-Glass Latency (ms) versus Frame Number', - }); - } - }, false); - - stopButton.onclick = () => { - addToEventLog('Stop button clicked.'); - stop(); - } - - connectButton.onclick = () => { - connectButton.disabled = true; - stopButton.disabled = false; - decHwButtons.style.display = "none"; - encHwButtons.style.display = "none"; - prefButtons.style.display = "none"; - bitButtons.style.display = "none"; - codecButtons.style.display = "none"; - resButtons.style.display = "none"; - modeButtons.style.display = "none"; - rateInput.style.display = "none"; - keyInput.style.display = "none"; - startMedia(); - } - - async function startMedia() { - if (stopped) return; - addToEventLog('startMedia called'); - try { - // Collect the bitrate - const rate = document.getElementById('rate').value; - - // Collect the keyframe gap - const keygap = document.getElementById('keygap').value; - - // Create a MediaStreamTrackProcessor, which exposes frames from the track - // as a 
ReadableStream of VideoFrames, using non-standard Chrome API. - let [track] = mediaStream.getVideoTracks(); - let ts = track.getSettings(); - const processor = new MediaStreamTrackProcessor(track); - inputStream = processor.readable; - - // Create a MediaStreamTrackGenerator, which exposes a track from a - // WritableStream of VideoFrames, using non-standard Chrome API. - const generator = new MediaStreamTrackGenerator({kind: 'video'}); - outputStream = generator.writable; - document.getElementById('outputVideo').srcObject = new MediaStream([generator]); - - // Initialize variables - let paint_count = 0; - let start_time = 0.0; - - const recordOutputFrames = (now, metadata) => { - metadata.output = 1.; - metadata.time = now; - if( start_time == 0.0 ) start_time = now; - let elapsed = (now - start_time)/1000.; - let fps = (++paint_count / elapsed).toFixed(3); - metadata.fps = fps; - metrics_update(metadata); - outputVideo.requestVideoFrameCallback(recordOutputFrames); - }; - - outputVideo.requestVideoFrameCallback(recordOutputFrames); - - const recordInputFrames = (now, metadata) => { - metadata.output = 0; - metadata.time = now; - if( start_time == 0.0 ) start_time = now; - let elapsed = (now - start_time)/1000.; - let fps = (++paint_count / elapsed).toFixed(3); - metadata.fps = fps; - metrics_update(metadata); - inputVideo.requestVideoFrameCallback(recordInputFrames); - }; - - inputVideo.requestVideoFrameCallback(recordInputFrames); - - //Create video Encoder configuration - const vConfig = { - keyInterval: keygap, - resolutionScale: 1, - framerateScale: 1.0, - }; - - let ssrcArr = new Uint32Array(1); - window.crypto.getRandomValues(ssrcArr); - const ssrc = ssrcArr[0]; - - const config = { - alpha: "discard", - latencyMode: latencyPref, - bitrateMode: bitPref, - codec: preferredCodec, - width: ts.width/vConfig.resolutionScale, - height: ts.height/vConfig.resolutionScale, - hardwareAcceleration: encHw, - decHwAcceleration: decHw, - bitrate: rate, - framerate: 
ts.frameRate/vConfig.framerateScale, - keyInterval: vConfig.keyInterval, - ssrc: ssrc - }; - - if (mode != "L1T1") { - config.scalabilityMode = mode; - } - - switch(preferredCodec){ - case "H264": - config.codec = "avc1.42002A"; // baseline profile, level 4.2 - config.avc = { format: "annexb" }; - config.pt = 1; - break; - case "H265": - config.codec = "hvc1.1.6.L123.00"; // Main profile, level 4.1, main Tier - config.hevc = { format: "annexb" }; - config.pt = 2; - break; - case "VP8": - config.codec = "vp8"; - config.pt = 3; - break; - case "VP9": - config.codec = "vp09.00.10.08"; //VP9, Profile 0, level 1, bit depth 8 - config.pt = 4; - break; - case "AV1": - config.codec = "av01.0.08M.10.0.110.09" // AV1 Main Profile, level 4.0, Main tier, 10-bit content, non-monochrome, with 4:2:0 chroma subsampling - config.pt = 5; - break; - } - - // Transfer the readable stream to the worker, as well as other info from the user interface. - // NOTE: transferring frameStream and reading it in the worker is more - // efficient than reading frameStream here and transferring VideoFrames individually. 
- streamWorker.postMessage({ type: "stream", config: config, streams: {input: inputStream, output: outputStream}}, [inputStream, outputStream]); - - } catch(e) { - addToEventLog(e.name + ": " + e.message, 'fatal'); - } - } -}, false); +'use strict'; + +let preferredResolution; +let mediaStream, bitrate = 100000; +let stopped = false; +let preferredCodec ="VP8"; +let mode = "L1T3"; +let latencyPref = "realtime", bitPref = "variable"; +let encHw = "no-preference", decHw = "no-preference"; +let streamWorker; +let inputStream, outputStream; +let metrics = { + all: [], +}; +let e2e = { + all: [], +}; +let display_metrics = { + all: [], +}; + +const rate = document.querySelector('#rate'); +const connectButton = document.querySelector('#connect'); +const stopButton = document.querySelector('#stop'); +const codecButtons = document.querySelector('#codecButtons'); +const resButtons = document.querySelector('#resButtons'); +const modeButtons = document.querySelector('#modeButtons'); +const decHwButtons = document.querySelector('#decHwButtons'); +const encHwButtons = document.querySelector('#encHwButtons'); +const chart2_div = document.getElementById('chart2_div'); +const videoSelect = document.querySelector('select#videoSource'); +const selectors = [videoSelect]; +chart2_div.style.display = "none"; +connectButton.disabled = false; +stopButton.disabled = true; + +videoSelect.onchange = function () { + videoSource = videoSelect.value; +}; + +const qvgaConstraints = {video: {width: 320, height: 240}}; +const vgaConstraints = {video: {width: 640, height: 480}}; +const hdConstraints = {video: {width: 1280, height: 720}}; +const fullHdConstraints = {video: {width: {min: 1920}, height: {min: 1080}}}; +const tv4KConstraints = {video: {width: {exact: 3840}, height: {exact: 2160}}}; +const cinema4KConstraints = {video: {width: {exact: 4096}, height: {exact: 2160}}}; +const eightKConstraints = {video: {width: {min: 7680}, height: {min: 4320}}}; + +let constraints = qvgaConstraints; + 
+function metrics_update(data) { + metrics.all.push(data); +} + +function metrics_report() { + metrics.all.sort((a, b) => { + return (100000 * (a.mediaTime - b.mediaTime) + a.output - b.output); + }); + const len = metrics.all.length; + let j = 0; + for (let i = 0; i < len ; i++ ) { + if (metrics.all[i].output == 1) { + const frameno = metrics.all[i].presentedFrames; + const g2g = metrics.all[i].expectedDisplayTime - metrics.all[i-1].captureTime; + const mediaTime = metrics.all[i].mediaTime; + const captureTime = metrics.all[i-1].captureTime; + const expectedDisplayTime = metrics.all[i].expectedDisplayTime; + const delay = metrics.all[i].expectedDisplayTime - metrics.all[i-1].expectedDisplayTime; + const data = [frameno, g2g]; + const info = {frameno: frameno, g2g: g2g, mediaTime: mediaTime, captureTime: captureTime, expectedDisplayTime: expectedDisplayTime, delay: delay}; + e2e.all.push(data); + display_metrics.all.push(info); + } + } + // addToEventLog('Data dump: ' + JSON.stringify(e2e.all)); + return { + count: e2e.all.length + }; +} + +function addToEventLog(text, severity = 'info') { + let log = document.querySelector('textarea'); + log.value += 'log-' + severity + ': ' + text + '\n'; + if (severity == 'fatal') stop(); +} + +function gotDevices(deviceInfos) { + // Handles being called several times to update labels. Preserve values. 
+ const values = selectors.map(select => select.value); + selectors.forEach(select => { + while (select.firstChild) { + select.removeChild(select.firstChild); + } + }); + for (let i = 0; i !== deviceInfos.length; ++i) { + const deviceInfo = deviceInfos[i]; + const option = document.createElement('option'); + option.value = deviceInfo.deviceId; + if (deviceInfo.kind === 'videoinput') { + option.text = deviceInfo.label || `camera ${videoSelect.length + 1}`; + videoSelect.appendChild(option); + } + } + selectors.forEach((select, selectorIndex) => { + if (Array.prototype.slice.call(select.childNodes).some(n => n.value === values[selectorIndex])) { + select.value = values[selectorIndex]; + } + }); +} + +async function getResValue(radio) { + preferredResolution = radio.value; + addToEventLog('Resolution selected: ' + preferredResolution); + switch(preferredResolution) { + case "qvga": + constraints = qvgaConstraints; + break; + case "vga": + constraints = vgaConstraints; + break; + case "hd": + constraints = hdConstraints; + break; + case "full-hd": + constraints = fullHdConstraints; + break; + case "tv4K": + constraints = tv4KConstraints; + break; + case "cinema4K": + constraints = cinema4KConstraints; + break; + case "eightK": + constraints = eightKConstraints; + break; + default: + constraints = qvgaConstraints; + break; + } + // Get a MediaStream from the webcam, and reset the resolution. + try { + //stop the tracks + if (mediaStream){ + mediaStream.getTracks().forEach(track => { + track.stop(); + }); + } + gotDevices(await navigator.mediaDevices.enumerateDevices()); + constraints.deviceId = videoSource ? 
{exact: videoSource} : undefined; + mediaStream = await navigator.mediaDevices.getUserMedia(constraints); + document.getElementById('inputVideo').srcObject = mediaStream; + } catch(e){ + addToEventLog(`EnumerateDevices or gUM error: ${e.message}`); + } +} + +function getPrefValue(radio) { + latencyPref = radio.value; + addToEventLog('Latency preference selected: ' + latencyPref); +} + +function getBitPrefValue(radio) { + bitPref = radio.value; + addToEventLog('Bitrate mode selected: ' + bitPref); +} + +function getCodecValue(radio) { + preferredCodec = radio.value; + addToEventLog('Codec selected: ' + preferredCodec); +} + +function getModeValue(radio) { + mode = radio.value; + addToEventLog('Mode selected: ' + mode); +} + +function getDecHwValue(radio) { + decHw = radio.value; + addToEventLog('Decoder Hardware Acceleration preference: ' + decHw); +} + +function getEncHwValue(radio) { + encHw = radio.value; + addToEventLog('Encoder Hardware Acceleration preference: ' + encHw); +} + +function stop() { + stopped = true; + stopButton.disabled = true; + connectButton.disabled = true; + chart2_div.style.display = "initial"; + streamWorker.postMessage({ type: "stop" }); + try { + inputStream.cancel(); + addToEventLog('inputStream cancelled'); + } catch(e) { + addToEventLog(`Could not cancel inputStream: ${e.message}`); + } + try { + outputStream.abort(); + addToEventLog('outputStream aborted'); + } catch(e) { + addToEventLog(`Could not abort outputStream: ${e.message}`); + } +} + +document.addEventListener('DOMContentLoaded', async function(event) { + if (stopped) return; + addToEventLog('DOM Content Loaded'); + + // Need to support standard mediacapture-transform implementations + + if (typeof MediaStreamTrackProcessor === 'undefined' || + typeof MediaStreamTrackGenerator === 'undefined') { + addToEventLog('Your browser does not support the MSTP and MSTG APIs.', 'fatal'); + return; + } + + try { + gotDevices(await navigator.mediaDevices.enumerateDevices()); + } catch 
(e) { + addToEventLog('Error in Device enumeration'); + } + constraints.deviceId = videoSource ? {exact: videoSource} : undefined; + // Get a MediaStream from the webcam. + mediaStream = await navigator.mediaDevices.getUserMedia(constraints); + // Connect the webcam stream to the video element. + document.getElementById('inputVideo').srcObject = mediaStream; + // Create a new worker. + streamWorker = new Worker("js/stream_worker.js"); + addToEventLog('Worker created.'); + + streamWorker.addEventListener('message', function(e) { + if (e.data.severity != 'chart'){ + addToEventLog('Worker msg: ' + e.data.text, e.data.severity); + } else { + // draw the glass-glass latency chart + metrics_report(); + const e2eX = e2e.all.map(item => item[0]); + const e2eY = e2e.all.map(item => item[1]); + const labels = e2e.all.map((item, index) => { + return Object.keys(display_metrics.all[index]).map(key => { + return `${key}: ${display_metrics.all[index][key]}`; + }).join('
'); + }); + Plotly.newPlot(chart2_div, [{ + x: e2eX, + y: e2eY, + text: labels, + mode: 'markers', + type: 'scatter', + }], { + xaxis: { + title: 'Frame number', + autorange: true, + }, + yaxis: { + title: 'Glass-Glass-Latency (ms)', + autorange: true, + }, + title: 'Glass-Glass Latency (ms) versus Frame Number', + }); + } + }, false); + + stopButton.onclick = () => { + addToEventLog('Stop button clicked.'); + stop(); + } + + connectButton.onclick = () => { + connectButton.disabled = true; + stopButton.disabled = false; + decHwButtons.style.display = "none"; + encHwButtons.style.display = "none"; + prefButtons.style.display = "none"; + bitButtons.style.display = "none"; + codecButtons.style.display = "none"; + resButtons.style.display = "none"; + modeButtons.style.display = "none"; + rateInput.style.display = "none"; + keyInput.style.display = "none"; + startMedia(); + } + + async function startMedia() { + if (stopped) return; + addToEventLog('startMedia called'); + try { + // Collect the bitrate + const rate = document.getElementById('rate').value; + + // Collect the keyframe gap + const keygap = document.getElementById('keygap').value; + + // Create a MediaStreamTrackProcessor, which exposes frames from the track + // as a ReadableStream of VideoFrames, using non-standard Chrome API. + let [track] = mediaStream.getVideoTracks(); + let ts = track.getSettings(); + const processor = new MediaStreamTrackProcessor(track); + inputStream = processor.readable; + + // Create a MediaStreamTrackGenerator, which exposes a track from a + // WritableStream of VideoFrames, using non-standard Chrome API. 
+ const generator = new MediaStreamTrackGenerator({kind: 'video'}); + outputStream = generator.writable; + document.getElementById('outputVideo').srcObject = new MediaStream([generator]); + + // Initialize variables + let paint_count = 0; + let start_time = 0.0; + + const recordOutputFrames = (now, metadata) => { + metadata.output = 1.; + metadata.time = now; + if( start_time == 0.0 ) start_time = now; + let elapsed = (now - start_time)/1000.; + let fps = (++paint_count / elapsed).toFixed(3); + metadata.fps = fps; + metrics_update(metadata); + outputVideo.requestVideoFrameCallback(recordOutputFrames); + }; + + outputVideo.requestVideoFrameCallback(recordOutputFrames); + + const recordInputFrames = (now, metadata) => { + metadata.output = 0; + metadata.time = now; + if( start_time == 0.0 ) start_time = now; + let elapsed = (now - start_time)/1000.; + let fps = (++paint_count / elapsed).toFixed(3); + metadata.fps = fps; + metrics_update(metadata); + inputVideo.requestVideoFrameCallback(recordInputFrames); + }; + + inputVideo.requestVideoFrameCallback(recordInputFrames); + + //Create video Encoder configuration + const vConfig = { + keyInterval: keygap, + resolutionScale: 1, + framerateScale: 1.0, + }; + + let ssrcArr = new Uint32Array(1); + window.crypto.getRandomValues(ssrcArr); + const ssrc = ssrcArr[0]; + + const config = { + alpha: "discard", + latencyMode: latencyPref, + bitrateMode: bitPref, + codec: preferredCodec, + width: ts.width/vConfig.resolutionScale, + height: ts.height/vConfig.resolutionScale, + hardwareAcceleration: encHw, + decHwAcceleration: decHw, + bitrate: rate, + framerate: ts.frameRate/vConfig.framerateScale, + keyInterval: vConfig.keyInterval, + ssrc: ssrc + }; + + if (mode != "L1T1") { + config.scalabilityMode = mode; + } + + switch(preferredCodec){ + case "H264": + config.codec = "avc1.42002A"; // baseline profile, level 4.2 + config.avc = { format: "annexb" }; + config.pt = 1; + break; + case "H265": + config.codec = "hvc1.1.6.L123.00"; // 
Main profile, level 4.1, main Tier + config.hevc = { format: "annexb" }; + config.pt = 2; + break; + case "VP8": + config.codec = "vp8"; + config.pt = 3; + break; + case "VP9": + config.codec = "vp09.00.10.08"; //VP9, Profile 0, level 1, bit depth 8 + config.pt = 4; + break; + case "AV1": + config.codec = "av01.0.08M.10.0.110.09" // AV1 Main Profile, level 4.0, Main tier, 10-bit content, non-monochrome, with 4:2:0 chroma subsampling + config.pt = 5; + break; + } + + // Transfer the readable stream to the worker, as well as other info from the user interface. + // NOTE: transferring frameStream and reading it in the worker is more + // efficient than reading frameStream here and transferring VideoFrames individually. + streamWorker.postMessage({ type: "stream", config: config, streams: {input: inputStream, output: outputStream}}, [inputStream, outputStream]); + + } catch(e) { + addToEventLog(e.name + ": " + e.message, 'fatal'); + } + } +}, false); diff --git a/samples/encode-decode-worker/js/stream_worker.js b/samples/encode-decode-worker/js/stream_worker.js index 48f6c69e..d5238b44 100644 --- a/samples/encode-decode-worker/js/stream_worker.js +++ b/samples/encode-decode-worker/js/stream_worker.js @@ -1,263 +1,263 @@ -'use strict'; - -let encoder, decoder, pl, started = false, stopped = false; - -let encqueue_aggregate = { - all: [], - min: Number.MAX_VALUE, - max: 0, - avg: 0, - sum: 0, -}; - -let decqueue_aggregate = { - all: [], - min: Number.MAX_VALUE, - max: 0, - avg: 0, - sum: 0, -}; - -function encqueue_update(duration) { - encqueue_aggregate.all.push(duration); - encqueue_aggregate.min = Math.min(encqueue_aggregate.min, duration); - encqueue_aggregate.max = Math.max(encqueue_aggregate.max, duration); - encqueue_aggregate.sum += duration; -} - -function encqueue_report() { - encqueue_aggregate.all.sort(); - const len = encqueue_aggregate.all.length; - const half = len >> 1; - const f = (len + 1) >> 2; - const t = (3 * (len + 1)) >> 2; - const alpha1 = (len + 
1)/4 - Math.trunc((len + 1)/4); - const alpha3 = (3 * (len + 1)/4) - Math.trunc(3 * (len + 1)/4); - const fquart = encqueue_aggregate.all[f] + alpha1 * (encqueue_aggregate.all[f + 1] - encqueue_aggregate.all[f]); - const tquart = encqueue_aggregate.all[t] + alpha3 * (encqueue_aggregate.all[t + 1] - encqueue_aggregate.all[t]); - const median = len % 2 === 1 ? encqueue_aggregate.all[len >> 1] : (encqueue_aggregate.all[half - 1] + encqueue_aggregate.all[half]) / 2; - return { - count: len, - min: encqueue_aggregate.min, - fquart: fquart, - avg: encqueue_aggregate.sum / len, - median: median, - tquart: tquart, - max: encqueue_aggregate.max, - }; -} - -function decqueue_update(duration) { - decqueue_aggregate.all.push(duration); - decqueue_aggregate.min = Math.min(decqueue_aggregate.min, duration); - decqueue_aggregate.max = Math.max(decqueue_aggregate.max, duration); - decqueue_aggregate.sum += duration; -} - -function decqueue_report() { - decqueue_aggregate.all.sort(); - const len = decqueue_aggregate.all.length; - const half = len >> 1; - const f = (len + 1) >> 2; - const t = (3 * (len + 1)) >> 2; - const alpha1 = (len + 1)/4 - Math.trunc((len + 1)/4); - const alpha3 = (3 * (len + 1)/4) - Math.trunc(3 * (len + 1)/4); - const fquart = decqueue_aggregate.all[f] + alpha1 * (decqueue_aggregate.all[f + 1] - decqueue_aggregate.all[f]); - const tquart = decqueue_aggregate.all[t] + alpha3 * (decqueue_aggregate.all[t + 1] - decqueue_aggregate.all[t]); - const median = len % 2 === 1 ? decqueue_aggregate.all[len >> 1] : (decqueue_aggregate.all[half - 1] + decqueue_aggregate.all[half]) / 2; - return { - count: len, - min: decqueue_aggregate.min, - fquart: fquart, - avg: decqueue_aggregate.sum / len, - median: median, - tquart: tquart, - max: decqueue_aggregate.max, - }; -} - -self.addEventListener('message', async function(e) { - if (stopped) return; - // In this demo, we expect at most two messages, one of each type. 
- let type = e.data.type; - - if (type == "stop") { - self.postMessage({text: 'Stop message received.'}); - if (started) pl.stop(); - return; - } else if (type != "stream"){ - self.postMessage({severity: 'fatal', text: 'Invalid message received.'}); - return; - } - // We received a "stream" event - self.postMessage({text: 'Stream event received.'}); - - try { - pl = new pipeline(e.data); - pl.start(); - } catch (e) { - self.postMessage({severity: 'fatal', text: `Pipeline creation failed: ${e.message}`}) - return; - } -}, false); - -class pipeline { - - constructor(eventData) { - this.stopped = false; - this.inputStream = eventData.streams.input; - this.outputStream = eventData.streams.output; - this.config = eventData.config; - } - - DecodeVideoStream(self) { - return new TransformStream({ - start(controller) { - this.decoder = decoder = new VideoDecoder({ - output: frame => controller.enqueue(frame), - error: (e) => { - self.postMessage({severity: 'fatal', text: `Init Decoder error: ${e.message}`}); - } - }); - }, - async transform(chunk, controller) { - if (this.decoder.state != "closed") { - if (chunk.type == "config") { - let config = JSON.parse(chunk.config); - try { - const decoderSupport = await VideoDecoder.isConfigSupported(config); - if (decoderSupport.supported) { - this.decoder.configure(decoderSupport.config); - self.postMessage({text: 'Decoder successfully configured:\n' + JSON.stringify(decoderSupport.config)}); - } else { - self.postMessage({severity: 'fatal', text: 'Decoder Config not supported:\n' + JSON.stringify(decoderSupport.config)}); - } - } catch (e) { - self.postMessage({severity: 'fatal', text: `Decoder Configuration error: ${e.message}`}); - } - } else { - try { - const queue = this.decoder.decodeQueueSize; - decqueue_update(queue); - this.decoder.decode(chunk); - } catch (e) { - self.postMessage({severity: 'fatal', text: 'Derror size: ' + chunk.byteLength + ' seq: ' + chunk.seqNo + ' kf: ' + chunk.keyframeIndex + ' delta: ' + 
chunk.deltaframeIndex + ' dur: ' + chunk.duration + ' ts: ' + chunk.timestamp + ' ssrc: ' + chunk.ssrc + ' pt: ' + chunk.pt + ' tid: ' + chunk.temporalLayerId + ' type: ' + chunk.type}); - self.postMessage({severity: 'fatal', text: `Catch Decode error: ${e.message}`}); - } - } - } - } - }); - } - - EncodeVideoStream(self, config) { - return new TransformStream({ - async start(controller) { - this.frameCounter = 0; - this.seqNo = 0; - this.keyframeIndex = 0; - this.deltaframeIndex = 0; - this.pending_outputs = 0; - this.encoder = encoder = new VideoEncoder({ - output: (chunk, cfg) => { - if (cfg.decoderConfig) { - cfg.decoderConfig.hardwareAcceleration = config.decHwAcceleration; - const decoderConfig = JSON.stringify(cfg.decoderConfig); - self.postMessage({text: 'Configuration: ' + decoderConfig}); - const configChunk = - { - type: "config", - seqNo: this.seqNo, - keyframeIndex: this.keyframeIndex, - deltaframeIndex: this.deltaframeIndex, - timestamp: 0, - pt: 0, - config: decoderConfig - }; - controller.enqueue(configChunk); - } - chunk.temporalLayerId = 0; - if (cfg.svc) { - chunk.temporalLayerId = cfg.svc.temporalLayerId; - } - this.seqNo++; - if (chunk.type == 'key') { - this.keyframeIndex++; - this.deltaframeIndex = 0; - } else { - this.deltaframeIndex++; - } - this.pending_outputs--; - chunk.seqNo = this.seqNo; - chunk.keyframeIndex = this.keyframeIndex; - chunk.deltaframeIndex = this.deltaframeIndex; - controller.enqueue(chunk); - }, - error: (e) => { - self.postMessage({severity: 'fatal', text: `Encoder error: ${e.message}`}); - } - }); - try { - const encoderSupport = await VideoEncoder.isConfigSupported(config); - if (encoderSupport.supported) { - this.encoder.configure(encoderSupport.config); - self.postMessage({text: 'Encoder successfully configured:\n' + JSON.stringify(encoderSupport.config)}); - } else { - self.postMessage({severity: 'fatal', text: 'Config not supported:\n' + JSON.stringify(encoderSupport.config)}); - } - } catch (e) { - 
'use strict';

// Worker side of the encode/decode sample: receives a ReadableStream of raw
// VideoFrames plus an encoder config from the main thread, pipes the frames
// through a VideoEncoder and a VideoDecoder back-to-back, writes the decoded
// frames to the output stream, and reports queue-size statistics and log
// messages back via postMessage.

let encoder, decoder, pl, started = false, stopped = false;

// Per-frame samples of the encoder queue size, with running min/max/sum so
// the final report is O(1) for those fields.
let encqueue_aggregate = {
  all: [],
  min: Number.MAX_VALUE,
  max: 0,
  avg: 0,
  sum: 0,
};

// Same shape as encqueue_aggregate, for the decoder queue.
let decqueue_aggregate = {
  all: [],
  min: Number.MAX_VALUE,
  max: 0,
  avg: 0,
  sum: 0,
};

// Record one encoder-queue sample.
function encqueue_update(duration) {
  encqueue_aggregate.all.push(duration);
  encqueue_aggregate.min = Math.min(encqueue_aggregate.min, duration);
  encqueue_aggregate.max = Math.max(encqueue_aggregate.max, duration);
  encqueue_aggregate.sum += duration;
}

// Record one decoder-queue sample.
function decqueue_update(duration) {
  decqueue_aggregate.all.push(duration);
  decqueue_aggregate.min = Math.min(decqueue_aggregate.min, duration);
  decqueue_aggregate.max = Math.max(decqueue_aggregate.max, duration);
  decqueue_aggregate.sum += duration;
}

// Shared summary-statistics helper for encqueue_report()/decqueue_report()
// (previously two byte-identical copies). Sorts aggregate.all in place and
// returns { count, min, fquart, avg, median, tquart, max }.
function queue_report(aggregate) {
  // Bug fix: the default Array.prototype.sort comparator is lexicographic
  // (e.g. 10 sorts before 2); a numeric comparator is required for correct
  // quartiles and median.
  aggregate.all.sort((a, b) => a - b);
  const len = aggregate.all.length;
  const half = len >> 1;
  const last = len - 1;
  // Quartile positions, linearly interpolated between neighbouring samples.
  // Bug fix: clamp the indices so small samples (len < 4) do not read past
  // the end of the array and yield NaN.
  const f = Math.min((len + 1) >> 2, last);
  const t = Math.min((3 * (len + 1)) >> 2, last);
  const fNext = Math.min(f + 1, last);
  const tNext = Math.min(t + 1, last);
  const alpha1 = (len + 1) / 4 - Math.trunc((len + 1) / 4);
  const alpha3 = (3 * (len + 1) / 4) - Math.trunc(3 * (len + 1) / 4);
  const fquart = aggregate.all[f] + alpha1 * (aggregate.all[fNext] - aggregate.all[f]);
  const tquart = aggregate.all[t] + alpha3 * (aggregate.all[tNext] - aggregate.all[t]);
  const median = len % 2 === 1 ?
    aggregate.all[half] :
    (aggregate.all[half - 1] + aggregate.all[half]) / 2;
  return {
    count: len,
    min: aggregate.min,
    fquart: fquart,
    avg: aggregate.sum / len,
    median: median,
    tquart: tquart,
    max: aggregate.max,
  };
}

// Summary statistics for the encoder queue samples.
function encqueue_report() {
  return queue_report(encqueue_aggregate);
}

// Summary statistics for the decoder queue samples.
function decqueue_report() {
  return queue_report(decqueue_aggregate);
}

self.addEventListener('message', async function(e) {
  if (stopped) return;
  // In this demo, we expect at most two messages, one of each type.
  let type = e.data.type;

  if (type == "stop") {
    self.postMessage({text: 'Stop message received.'});
    if (started) pl.stop();
    return;
  } else if (type != "stream"){
    self.postMessage({severity: 'fatal', text: 'Invalid message received.'});
    return;
  }
  // We received a "stream" event
  self.postMessage({text: 'Stream event received.'});

  try {
    pl = new pipeline(e.data);
    // Deliberately not awaited: start() runs for the lifetime of the stream
    // and reports its own errors via postMessage.
    pl.start();
  } catch (err) {
    // Renamed from `e` to avoid shadowing the message event above.
    self.postMessage({severity: 'fatal', text: `Pipeline creation failed: ${err.message}`});
    return;
  }
}, false);

// Encode -> decode pipeline. Built from the streams and config carried by the
// "stream" message; start() wires input -> encoder -> decoder -> output.
class pipeline {

  constructor(eventData) {
    this.stopped = false;
    this.inputStream = eventData.streams.input;   // ReadableStream of VideoFrames
    this.outputStream = eventData.streams.output; // WritableStream for decoded frames
    this.config = eventData.config;               // VideoEncoder configuration
  }

  // TransformStream that feeds encoded chunks (and the leading "config"
  // chunk) into a VideoDecoder and emits decoded VideoFrames.
  DecodeVideoStream(self) {
    return new TransformStream({
      start(controller) {
        // Also stored in the module-level `decoder` so stop() can close it.
        this.decoder = decoder = new VideoDecoder({
          output: frame => controller.enqueue(frame),
          error: (e) => {
            self.postMessage({severity: 'fatal', text: `Init Decoder error: ${e.message}`});
          }
        });
      },
      async transform(chunk, controller) {
        if (this.decoder.state != "closed") {
          if (chunk.type == "config") {
            // The encoder serialized its decoderConfig as JSON; validate it
            // before configuring the decoder.
            let config = JSON.parse(chunk.config);
            try {
              const decoderSupport = await VideoDecoder.isConfigSupported(config);
              if (decoderSupport.supported) {
                this.decoder.configure(decoderSupport.config);
                self.postMessage({text: 'Decoder successfully configured:\n' + JSON.stringify(decoderSupport.config)});
              } else {
                self.postMessage({severity: 'fatal', text: 'Decoder Config not supported:\n' + JSON.stringify(decoderSupport.config)});
              }
            } catch (e) {
              self.postMessage({severity: 'fatal', text: `Decoder Configuration error: ${e.message}`});
            }
          } else {
            try {
              // Sample the decode queue depth before submitting the chunk.
              const queue = this.decoder.decodeQueueSize;
              decqueue_update(queue);
              this.decoder.decode(chunk);
            } catch (e) {
              self.postMessage({severity: 'fatal', text: 'Derror size: ' + chunk.byteLength + ' seq: ' + chunk.seqNo + ' kf: ' + chunk.keyframeIndex + ' delta: ' + chunk.deltaframeIndex + ' dur: ' + chunk.duration + ' ts: ' + chunk.timestamp + ' ssrc: ' + chunk.ssrc + ' pt: ' + chunk.pt + ' tid: ' + chunk.temporalLayerId + ' type: ' + chunk.type});
              self.postMessage({severity: 'fatal', text: `Catch Decode error: ${e.message}`});
            }
          }
        }
      }
    });
  }

  // TransformStream that feeds raw VideoFrames into a VideoEncoder and emits
  // annotated EncodedVideoChunks, preceded by a "config" chunk carrying the
  // serialized decoderConfig.
  EncodeVideoStream(self, config) {
    return new TransformStream({
      async start(controller) {
        this.frameCounter = 0;
        this.seqNo = 0;
        this.keyframeIndex = 0;
        this.deltaframeIndex = 0;
        this.pending_outputs = 0; // frames submitted but not yet output
        // Also stored in the module-level `encoder` so stop() can close it.
        this.encoder = encoder = new VideoEncoder({
          output: (chunk, cfg) => {
            if (cfg.decoderConfig) {
              cfg.decoderConfig.hardwareAcceleration = config.decHwAcceleration;
              const decoderConfig = JSON.stringify(cfg.decoderConfig);
              self.postMessage({text: 'Configuration: ' + decoderConfig});
              const configChunk =
              {
                type: "config",
                seqNo: this.seqNo,
                keyframeIndex: this.keyframeIndex,
                deltaframeIndex: this.deltaframeIndex,
                timestamp: 0,
                pt: 0,
                config: decoderConfig
              };
              controller.enqueue(configChunk);
            }
            chunk.temporalLayerId = 0;
            if (cfg.svc) {
              chunk.temporalLayerId = cfg.svc.temporalLayerId;
            }
            // Note: counters are incremented before being copied onto the
            // chunk, so data chunks are numbered from 1 (the config chunk
            // uses the pre-increment value 0).
            this.seqNo++;
            if (chunk.type == 'key') {
              this.keyframeIndex++;
              this.deltaframeIndex = 0;
            } else {
              this.deltaframeIndex++;
            }
            this.pending_outputs--;
            chunk.seqNo = this.seqNo;
            chunk.keyframeIndex = this.keyframeIndex;
            chunk.deltaframeIndex = this.deltaframeIndex;
            controller.enqueue(chunk);
          },
          error: (e) => {
            self.postMessage({severity: 'fatal', text: `Encoder error: ${e.message}`});
          }
        });
        try {
          const encoderSupport = await VideoEncoder.isConfigSupported(config);
          if (encoderSupport.supported) {
            this.encoder.configure(encoderSupport.config);
            self.postMessage({text: 'Encoder successfully configured:\n' + JSON.stringify(encoderSupport.config)});
          } else {
            self.postMessage({severity: 'fatal', text: 'Config not supported:\n' + JSON.stringify(encoderSupport.config)});
          }
        } catch (e) {
          self.postMessage({severity: 'fatal', text: `Configuration error: ${e.message}`});
        }
      },
      transform(frame, controller) {
        // Backpressure: drop frames when too many outputs are outstanding.
        if (this.pending_outputs <= 30) {
          this.pending_outputs++;
          // Force a keyframe every config.keyInterval frames.
          const insert_keyframe = (this.frameCounter % config.keyInterval) == 0;
          this.frameCounter++;
          try {
            if (this.encoder.state != "closed") {
              // Sample the encode queue depth before submitting the frame.
              const queue = this.encoder.encodeQueueSize;
              encqueue_update(queue);
              this.encoder.encode(frame, { keyFrame: insert_keyframe });
            }
          } catch(e) {
            self.postMessage({severity: 'fatal', text: 'Encoder Error: ' + e.message});
          }
        }
        // Always close the frame, encoded or dropped, to release its memory.
        frame.close();
      }
    });
  }

  // Close codecs, flag the pipeline stopped, and post the queue reports
  // (only when at least two samples were collected).
  stop() {
    if (encoder.state != "closed") encoder.close();
    if (decoder.state != "closed") decoder.close();
    stopped = true;
    this.stopped = true;
    const len = encqueue_aggregate.all.length;
    if (len > 1) {
      const encqueue_stats = encqueue_report();
      const decqueue_stats = decqueue_report();
      self.postMessage({severity: 'chart'});
      self.postMessage({text: 'Encoder Queue report: ' + JSON.stringify(encqueue_stats)});
      self.postMessage({text: 'Decoder Queue report: ' + JSON.stringify(decqueue_stats)});
    }
    self.postMessage({text: 'stop(): frame, encoder and decoder closed'});
    return;
  }

  // Wire input -> encode -> decode -> output; resolves when the stream ends
  // or errors. Errors are reported to the main thread rather than rethrown.
  async start() {
    if (stopped) return;
    started = true;
    self.postMessage({text: 'Start method called.'});
    try {
      await this.inputStream
        .pipeThrough(this.EncodeVideoStream(self, this.config))
        .pipeThrough(this.DecodeVideoStream(self))
        .pipeTo(this.outputStream);
    } catch (e) {
      self.postMessage({severity: 'fatal', text: `start error: ${e.message}`});
    }
  }
}