blob: 419e840b34554df97b9343be29a758db3b79b4d8 [file] [log] [blame]
Thomas Guilbertd7a274b2023-03-16 00:22:291<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <title>Audio latency tracing page</title>
</head>
6
7<body>
8<div id="container">
9 <p> This page is meant to serve as an example of how to use "audio.latency"
10 tracing to measure internal audio latency. The "audio.latency" category
11 enables code which listens for jumps in amplitude (volume) and starts or
12 stops tracing. We start tracing right after receiving loud audio from a
13 microphone, and stop right before sending that loud audio to speakers.
14 The duration of the trace event (which should show up as an
  "AmplitudePeak" in the tracing tools) encompasses the total internal latency.
16 </p>
  <p>Instructions:</p>
  <ul>
    <li>[Prerequisite] Close all other tabs but this one.</li>
    <li>[Prerequisite] Make sure there is a microphone plugged into the test
      machine, and that the surrounding environment is not too loud.</li>
    <li>Open chrome://tracing and start recording a trace which includes the
      "audio.latency" category.</li>
    <li>Click the "Initialize" button.</li>
    <li>Select either the WebAudio or the HTMLAudioElement button.</li>
    <li>Repeatedly clap next to the microphone a few times. Make sure to clap
      clearly, and to leave time between claps (0.5s-1s should be enough).</li>
    <li>Stop the trace. "AmplitudePeak" events should show up under the audio
      service process.</li>
  </ul>
32 <p>
33 Note: The "audio.latency" category only expects one input and one output.
34 Multiple IOs will result in incoherent traces. Additionally,
  tracing <strong>must</strong> be started before starting the test, or no traces will
36 be captured. Refreshing the page after starting a trace is also not enough:
37 one must verify that there are no InputStreams or OutputStreams alive, by
38 navigating to the "audio" tab of chrome://media-internals. Closing all tabs
39 and waiting 2-10s should be enough for all outstanding streams to close.
40 </p>
  <button id="initBtn" onclick="init()">Initialize</button>
  <br>
  <br>
44 <div id="routeMsg"></div>
  <div id="outputTypesDiv" style="visibility:hidden">
    <button id="mssnBtn" onclick="trackToMSSN()">Use WebAudio</button>
    <button id="audioElementBtn" onclick="trackToAudioElement()">Use HTMLAudioElement</button>
  </div>
49 <div id="errorMsg"></div>
50</div>
51
52<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
53<script>
// getUserMedia() constraints: capture audio only — latency tracing needs no video.
const constraints = {
  audio: true,
  video: false,
};
58
// Shared mutable state, assigned by init() and the routing handlers below.
let track;   // first audio MediaStreamTrack from getUserMedia (also exposed on window)
let stream;  // MediaStream returned by getUserMedia

let audioContext;  // lazily created by initAudioContext()
let streamNode;    // MediaStreamAudioSourceNode, set when routing through WebAudio

let audioElement;  // detached <audio> element, set when routing through HTMLAudioElement
66
// Shows the active audio route in the #routeMsg element. Uses textContent so
// that literal markup in the description (e.g. "<audio>.srcObject") is shown
// as text instead of being parsed as HTML, which innerHTML would do.
function displayRoute(msg) {
  document.querySelector("#routeMsg").textContent = msg;
}
70
// Greys out (disables) the element matched by the given CSS selector.
function disable(id) {
  const element = document.querySelector(id);
  element.disabled = true;
}
74
// Makes the element matched by the given CSS selector visible.
function show(id) {
  const element = document.querySelector(id);
  element.style.visibility = 'visible';
}
78
// Hides the element matched by the given CSS selector (keeps its layout box).
function hide(id) {
  const element = document.querySelector(id);
  element.style.visibility = 'hidden';
}
82
// Lazily creates the shared AudioContext; a no-op on subsequent calls.
async function initAudioContext() {
  if (audioContext) {
    return;
  }
  audioContext = new AudioContext();
}
88
// Records the first audio track of the freshly granted capture stream and
// logs which input device was selected.
function handleSuccess() {
  const audioTracks = stream.getAudioTracks();
  const [firstTrack] = audioTracks;
  console.log(`Using Audio device: ${firstTrack.label}`);
  console.log(audioTracks);
  track = firstTrack;
  window.track = track; // make variable available to browser console
}
96
// Reports a getUserMedia() failure to the page and the console.
// `error` is the DOMException rejected by navigator.mediaDevices.getUserMedia().
function handleError(error) {
  // Modern browsers report a denied permission prompt as 'NotAllowedError';
  // 'PermissionDeniedError' is the legacy name, kept for older adapters.
  if (error.name === 'NotAllowedError' ||
      error.name === 'PermissionDeniedError') {
    errorMsg('Permissions have not been granted to use your camera and ' +
        'microphone, you need to allow the page access to your devices in ' +
        'order for the demo to work.');
  }
  errorMsg(`getUserMedia error: ${error.name}`, error);
}
105
// Appends `msg` as a paragraph inside #errorMsg; when `error` is supplied it
// is additionally dumped to the console for debugging.
function errorMsg(msg, error) {
  const container = document.querySelector('#errorMsg');
  container.innerHTML += `<p>${msg}</p>`;
  if (error !== undefined) {
    console.error(error);
  }
}
113
// Requests microphone access, prepares the shared AudioContext, and reveals
// the output-routing buttons. Bound to the "Initialize" button.
async function init() {
  try {
    stream = await navigator.mediaDevices.getUserMedia(constraints);
    handleSuccess();
  } catch (e) {
    handleError(e);
    // Without a capture stream the routing buttons cannot work; leave the
    // page in its initial state so the user can fix the problem and retry.
    return;
  }

  await initAudioContext();

  disable("#initBtn");
  show("#outputTypesDiv");
}
127
// Routes the captured microphone stream through WebAudio: wraps the stream in
// a MediaStreamAudioSourceNode connected directly to the context destination.
function trackToMSSN() {
  const source = audioContext.createMediaStreamSource(stream);
  streamNode = source;
  source.connect(audioContext.destination);
  hide("#outputTypesDiv");
  displayRoute("gUM --> MediaStreamSourceNode --> audioContext.destination");
}
134
// Routes the captured microphone stream to the speakers through a detached
// HTMLAudioElement, then hides the routing buttons.
function trackToAudioElement() {
  audioElement = document.createElement('audio');
  audioElement.srcObject = stream;
  // play() returns a promise; report autoplay-policy or device failures
  // instead of leaving the rejection unhandled.
  audioElement.play().catch(
      (e) => errorMsg(`audio.play() failed: ${e.name}`, e));
  hide("#outputTypesDiv");
  displayRoute("gUM --> MediaStream --> <audio>.srcObject");
}
142
143</script>
144
145</body>
146</html>