Merge pull request #718 from aboba/main
Add encode/decode time graphs to encode-decode sample
Djuffin authored Sep 28, 2023
2 parents 3856bbf + 9aaf899 commit 2a0d426
Showing 3 changed files with 162 additions and 30 deletions.
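In outline, the new encode/decode time graphs work as follows: the worker records performance.now() just before each encode()/decode() call and again in the corresponding output callback, tags both samples with the frame timestamp, and pairs them per frame to get a per-frame encode/decode time, which main.js then plots with Plotly in two new chart divs. A condensed sketch of that pairing, not the exact sample code:

// Condensed sketch of the per-frame timing added in stream_worker.js.
// Each sample is {output: 0|1, timestamp, time}: output 0 is recorded just
// before encode()/decode(), output 1 in the matching output callback.
const samples = [];

function record(output, timestamp) {
  samples.push({ output, timestamp, time: performance.now() });
}

function perFrameDelays() {
  // Sort primarily by frame timestamp; the output flag breaks ties so the
  // "before" sample of a frame immediately precedes its "after" sample.
  samples.sort((a, b) => 100000 * (a.timestamp - b.timestamp) + a.output - b.output);
  const delays = [];
  for (let i = 1; i < samples.length; i++) {
    const prev = samples[i - 1], cur = samples[i];
    if (cur.output === 1 && prev.output === 0 && cur.timestamp === prev.timestamp) {
      delays.push([cur.timestamp, cur.time - prev.time]); // [frame timestamp, ms]
    }
  }
  return delays; // sent to the page as 'chart' messages and plotted with Plotly
}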
10 changes: 10 additions & 0 deletions samples/encode-decode-worker/index.html
@@ -75,6 +75,12 @@ <h2>WebCodecs in Worker + RVFC</h2>
value=3000>
</div>

<div id="frameInput">
<label for="framer">framerate: </label>
<input type="text" name="framer" id="framer" minlength=2 maxlength=3 size=3
value=30>
</div>

<div id="codecButtons">
<p>Codec:</p>
<input type="radio" id="H264" name="codec" value="H264" onchange="getCodecValue(this)">
@@ -155,6 +161,10 @@ <h2>WebCodecs in Worker + RVFC</h2>

<div id="chart2_div" style="width: 900px; height: 500px;"></div>

<div id="chart3_div" style="width: 900px; height: 500px;"></div>

<div id="chart4_div" style="width: 900px; height: 500px;"></div>

<div class="select">
<label for="videoSource">Video source: </label><select id="videoSource"></select>
</div>
74 changes: 47 additions & 27 deletions samples/encode-decode-worker/js/main.js
@@ -20,6 +20,7 @@ let display_metrics = {
};

const rate = document.querySelector('#rate');
const framer = document.querySelector('#framer');
const connectButton = document.querySelector('#connect');
const stopButton = document.querySelector('#stop');
const codecButtons = document.querySelector('#codecButtons');
@@ -28,9 +29,13 @@ const modeButtons = document.querySelector('#modeButtons');
const decHwButtons = document.querySelector('#decHwButtons');
const encHwButtons = document.querySelector('#encHwButtons');
const chart2_div = document.getElementById('chart2_div');
const chart3_div = document.getElementById('chart3_div');
const chart4_div = document.getElementById('chart4_div');
const videoSelect = document.querySelector('select#videoSource');
const selectors = [videoSelect];
chart2_div.style.display = "none";
chart3_div.style.display = "none";
chart4_div.style.display = "none";
connectButton.disabled = false;
stopButton.disabled = true;

@@ -189,6 +194,8 @@ function stop() {
stopButton.disabled = true;
connectButton.disabled = true;
chart2_div.style.display = "initial";
chart3_div.style.display = "initial";
chart4_div.style.display = "initial";
streamWorker.postMessage({ type: "stop" });
try {
inputStream.cancel();
@@ -231,35 +238,42 @@ document.addEventListener('DOMContentLoaded', async function(event) {
addToEventLog('Worker created.');

streamWorker.addEventListener('message', function(e) {
let labels = '';
if (e.data.severity != 'chart'){
addToEventLog('Worker msg: ' + e.data.text, e.data.severity);
} else {
// draw the glass-glass latency chart
metrics_report();
const e2eX = e2e.all.map(item => item[0]);
const e2eY = e2e.all.map(item => item[1]);
const labels = e2e.all.map((item, index) => {
return Object.keys(display_metrics.all[index]).map(key => {
return `${key}: ${display_metrics.all[index][key]}`;
}).join('<br>');
if (e.data.text == '') {
metrics_report(); // sets e2e.all and display_metrics
e.data.text = JSON.stringify(e2e.all);
labels = e2e.all.map((item, index) => {
return Object.keys(display_metrics.all[index]).map(key => {
return `${key}: ${display_metrics.all[index][key]}`;
}).join('<br>');
});
}
const parsed = JSON.parse(e.data.text);
const x = parsed.map(item => item[0]);
const y = parsed.map(item => item[1]);
// TODO: more options needed from https://plotly.com/javascript/line-and-scatter
Plotly.newPlot(e.data.div, [{
x,
y,
text: labels,
mode: 'markers',
type: 'scatter',
}], {
xaxis: {
title: e.data.x,
autorange: true,
range: [0, Math.max.apply(null, x) + 100 /* + a bit, 10%-ish to make it look good */],
},
yaxis: {
title: e.data.y,
autorange: true,
//range: [0, Math.max.apply(null, y) /* + a bit, 10%-ish to make it look good */],
},
title: e.data.label,
});
Plotly.newPlot(chart2_div, [{
x: e2eX,
y: e2eY,
text: labels,
mode: 'markers',
type: 'scatter',
}], {
xaxis: {
title: 'Frame number',
autorange: true,
},
yaxis: {
title: 'Glass-Glass-Latency (ms)',
autorange: true,
},
title: 'Glass-Glass Latency (ms) versus Frame Number',
});
}
}, false);
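For reference, the reworked handler above expects 'chart' messages shaped roughly like the one below (illustrative values, not part of the sample): text carries a JSON-encoded array of [x, y] pairs, div names the target element, and an empty text string tells main.js to fall back to the glass-glass latency data from metrics_report().

// Illustrative 'chart' message as posted from stream_worker.js on stop();
// the data points here are made-up example values.
self.postMessage({
  severity: 'chart',
  div: 'chart3_div',                         // id of the target <div> on the page
  x: 'Timestamp',
  y: 'Encoding Time',
  label: 'Encoding Time (ms) by Timestamp',
  text: JSON.stringify([[33000, 4.2], [66000, 3.7]]) // array of [x, y] pairs
});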

@@ -279,6 +293,7 @@ document.addEventListener('DOMContentLoaded', async function(event) {
resButtons.style.display = "none";
modeButtons.style.display = "none";
rateInput.style.display = "none";
frameInput.style.display = "none";
keyInput.style.display = "none";
startMedia();
}
@@ -290,6 +305,9 @@ document.addEventListener('DOMContentLoaded', async function(event) {
// Collect the bitrate
const rate = document.getElementById('rate').value;

// Collect the framerate
const framer = document.getElementById('framer').value;

// Collect the keyframe gap
const keygap = document.getElementById('keygap').value;

@@ -346,6 +364,7 @@ document.addEventListener('DOMContentLoaded', async function(event) {
let ssrcArr = new Uint32Array(1);
window.crypto.getRandomValues(ssrcArr);
const ssrc = ssrcArr[0];
const framerat = Math.min(framer, ts.frameRate/vConfig.framerateScale);

const config = {
alpha: "discard",
@@ -357,7 +376,7 @@ document.addEventListener('DOMContentLoaded', async function(event) {
hardwareAcceleration: encHw,
decHwAcceleration: decHw,
bitrate: rate,
framerate: ts.frameRate/vConfig.framerateScale,
framerate: framerat,
keyInterval: vConfig.keyInterval,
ssrc: ssrc
};
@@ -368,7 +387,8 @@ document.addEventListener('DOMContentLoaded', async function(event) {

switch(preferredCodec){
case "H264":
config.codec = "avc1.42002A"; // baseline profile, level 4.2
/* config.codec = "avc1.640028"; */
config.avc = { format: "annexb" };
config.pt = 1;
break;
108 changes: 105 additions & 3 deletions samples/encode-decode-worker/js/stream_worker.js
@@ -2,6 +2,28 @@

let encoder, decoder, pl, started = false, stopped = false;

let enc_aggregate = {
all: [],
};

let enc_time = {
all: [],
min: Number.MAX_VALUE,
max: 0,
sum: 0
};

let dec_aggregate = {
all: [],
};

let dec_time = {
all: [],
min: Number.MAX_VALUE,
max: 0,
sum: 0
};

let encqueue_aggregate = {
all: [],
min: Number.MAX_VALUE,
Expand All @@ -18,13 +40,44 @@ let decqueue_aggregate = {
sum: 0,
};

function enc_update(data) {
enc_aggregate.all.push(data);
}

function encqueue_update(duration) {
encqueue_aggregate.all.push(duration);
encqueue_aggregate.min = Math.min(encqueue_aggregate.min, duration);
encqueue_aggregate.max = Math.max(encqueue_aggregate.max, duration);
encqueue_aggregate.sum += duration;
}

function enc_report() {
enc_aggregate.all.sort((a, b) => {
return (100000 * (a.timestamp - b.timestamp) + a.output - b.output);
});
const len = enc_aggregate.all.length;
if (len < 2) return;
for (let i = 1; i < len ; i++ ) {
if ((enc_aggregate.all[i].output == 1) && (enc_aggregate.all[i-1].output == 0) && (enc_aggregate.all[i].timestamp == enc_aggregate.all[i-1].timestamp)) {
const timestamp = enc_aggregate.all[i].timestamp;
const enc_delay = enc_aggregate.all[i].time - enc_aggregate.all[i-1].time;
const data = [timestamp, enc_delay];
enc_time.all.push(data);
enc_time.min = Math.min(enc_time.min, enc_delay);
enc_time.max = Math.max(enc_time.max, enc_delay);
enc_time.sum += enc_delay;
}
}
const avg = enc_time.sum / enc_time.all.length;
//self.postMessage({text: 'Encode Time Data dump: ' + JSON.stringify(enc_time.all)});
return {
count: enc_time.all.length,
min: enc_time.min,
avg: avg,
max: enc_time.max
};
}
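A note on the comparator in enc_report() above (dec_report() below uses the same one): the timestamp difference is weighted by 100000 so ordering is dominated by the frame timestamp, and the output flag (0 = submitted, 1 = produced) only breaks ties within a frame, which is what lets the loop pair adjacent entries. A minimal illustration, using hypothetical microsecond frame timestamps:

// Minimal illustration of the ordering enc_report() relies on;
// the timestamps are hypothetical microsecond values.
const entries = [
  { output: 1, timestamp: 33333, time: 12.0 },
  { output: 0, timestamp: 66666, time: 20.0 },
  { output: 0, timestamp: 33333, time: 8.0 },
  { output: 1, timestamp: 66666, time: 23.5 },
];
entries.sort((a, b) => 100000 * (a.timestamp - b.timestamp) + a.output - b.output);
// Sorted order: 33333/0, 33333/1, 66666/0, 66666/1, so each frame's "before"
// entry directly precedes its "after" entry and delay = after.time - before.time.
console.log(entries.map(e => `${e.timestamp}/${e.output}`).join(', '));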

function encqueue_report() {
encqueue_aggregate.all.sort();
const len = encqueue_aggregate.all.length;
@@ -47,6 +100,37 @@ function encqueue_report() {
};
}

function dec_update(data) {
dec_aggregate.all.push(data);
}

function dec_report() {
dec_aggregate.all.sort((a, b) => {
return (100000 * (a.timestamp - b.timestamp) + a.output - b.output);
});
const len = dec_aggregate.all.length;
if (len < 2) return;
for (let i = 1; i < len ; i++ ) {
if ((dec_aggregate.all[i].output == 1) && (dec_aggregate.all[i-1].output == 0) && (dec_aggregate.all[i].timestamp == dec_aggregate.all[i-1].timestamp)) {
const timestamp = dec_aggregate.all[i].timestamp;
const dec_delay = dec_aggregate.all[i].time - dec_aggregate.all[i-1].time;
const data = [timestamp, dec_delay];
dec_time.all.push(data);
dec_time.min = Math.min(dec_time.min, dec_delay);
dec_time.max = Math.max(dec_time.max, dec_delay);
dec_time.sum += dec_delay;
}
}
const avg = dec_time.sum / dec_time.all.length;
//self.postMessage({text: 'Decode Time Data dump: ' + JSON.stringify(dec_time.all)});
return {
count: dec_time.all.length,
min: dec_time.min,
avg: avg,
max: dec_time.max
};
}

function decqueue_update(duration) {
decqueue_aggregate.all.push(duration);
decqueue_aggregate.min = Math.min(decqueue_aggregate.min, duration);
@@ -114,9 +198,13 @@ class pipeline {
return new TransformStream({
start(controller) {
this.decoder = decoder = new VideoDecoder({
output: frame => controller.enqueue(frame),
output: (frame) => {
const after = performance.now();
dec_update({output: 1, timestamp: frame.timestamp, time: after});
controller.enqueue(frame);
},
error: (e) => {
self.postMessage({severity: 'fatal', text: `Init Decoder error: ${e.message}`});
self.postMessage({severity: 'fatal', text: `Decoder error: ${e.message}`});
}
});
},
@@ -139,6 +227,8 @@ class pipeline {
try {
const queue = this.decoder.decodeQueueSize;
decqueue_update(queue);
const before = performance.now();
dec_update({output: 0, timestamp: chunk.timestamp, time: before});
this.decoder.decode(chunk);
} catch (e) {
self.postMessage({severity: 'fatal', text: 'Derror size: ' + chunk.byteLength + ' seq: ' + chunk.seqNo + ' kf: ' + chunk.keyframeIndex + ' delta: ' + chunk.deltaframeIndex + ' dur: ' + chunk.duration + ' ts: ' + chunk.timestamp + ' ssrc: ' + chunk.ssrc + ' pt: ' + chunk.pt + ' tid: ' + chunk.temporalLayerId + ' type: ' + chunk.type});
@@ -175,6 +265,10 @@ class pipeline {
config: decoderConfig
};
controller.enqueue(configChunk);
}
if (chunk.type != 'config'){
const after = performance.now();
enc_update({output: 1, timestamp: chunk.timestamp, time: after});
}
chunk.temporalLayerId = 0;
if (cfg.svc) {
@@ -218,6 +312,8 @@ class pipeline {
if (this.encoder.state != "closed") {
const queue = this.encoder.encodeQueueSize;
encqueue_update(queue);
const before = performance.now();
enc_update({output: 0, timestamp: frame.timestamp, time: before});
this.encoder.encode(frame, { keyFrame: insert_keyframe });
}
} catch(e) {
@@ -236,10 +332,16 @@ class pipeline {
this.stopped = true;
const len = encqueue_aggregate.all.length;
if (len > 1) {
const enc_stats = enc_report();
const encqueue_stats = encqueue_report();
const dec_stats = dec_report();
const decqueue_stats = decqueue_report();
self.postMessage({severity: 'chart'});
self.postMessage({severity: 'chart', x: 'Frame Number', y: 'Glass-Glass Latency', label: 'Glass-Glass Latency (ms) by Frame Number', div: 'chart2_div', text: ''});
self.postMessage({severity: 'chart', x: 'Timestamp', y: 'Encoding Time', label: 'Encoding Time (ms) by Timestamp', div: 'chart3_div', text: JSON.stringify(enc_time.all)});
self.postMessage({severity: 'chart', x: 'Timestamp', y: 'Decoding Time', label: 'Decoding Time (ms) by Timestamp', div: 'chart4_div', text: JSON.stringify(dec_time.all)});
self.postMessage({text: 'Encoder Time report: ' + JSON.stringify(enc_stats)});
self.postMessage({text: 'Encoder Queue report: ' + JSON.stringify(encqueue_stats)});
self.postMessage({text: 'Decoder Time report: ' + JSON.stringify(dec_stats)});
self.postMessage({text: 'Decoder Queue report: ' + JSON.stringify(decqueue_stats)});
}
self.postMessage({text: 'stop(): frame, encoder and decoder closed'});
