# Audio input/output

Language: JavaScript
// Start button: capture microphone audio, route it through a Web Audio
// graph (source -> analyser -> MediaStream destination), play the
// processed stream in an <audio> element, and draw a live time-domain
// waveform on a canvas.
document.querySelector("#btnStart").addEventListener("click", async () => {
  // --- Playback element for the processed stream ------------------------
  const audioElement = document.createElement("audio");
  audioElement.setAttribute("controls", ""); // boolean attribute: presence enables it
  audioElement.setAttribute("autoplay", "autoplay");
  document.body.appendChild(audioElement);

  // --- Capture microphone audio -----------------------------------------
  // NOTE(review): getUserMedia rejects if the user denies permission; that
  // rejection is currently unhandled and surfaces as an unhandled promise
  // rejection — consider wrapping in try/catch.
  const inputMediaStream = await navigator.mediaDevices.getUserMedia({
    audio: true,
  });

  // Created inside a user gesture (click), so the context starts in the
  // "running" state without needing resume().
  const audioContext = new AudioContext();

  // Source node wrapping the microphone stream.
  const audioSourceStream =
    audioContext.createMediaStreamSource(inputMediaStream);

  // Destination exposing the processed audio as a MediaStream.
  // (audioContext.destination would instead play straight to the speakers,
  // which feeds back into the microphone.)
  const audioDestinationStream =
    audioContext.createMediaStreamDestination();

  // --- Analyser for the waveform display --------------------------------
  // Settings follow the MDN createAnalyser() example:
  // https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createAnalyser
  const analyser = audioContext.createAnalyser();
  analyser.minDecibels = -90;
  analyser.maxDecibels = -10;
  analyser.smoothingTimeConstant = 0.85;
  analyser.fftSize = 2048;

  // BUG FIX: route the audio THROUGH the analyser. Previously the source
  // connected directly to the destination and the analyser had no input,
  // so getByteTimeDomainData() only ever saw silence (a flat line).
  audioSourceStream.connect(analyser);
  analyser.connect(audioDestinationStream);

  // --- Bind the processed stream to the <audio> element ------------------
  const outputMediaStream = new MediaStream();
  outputMediaStream.addTrack(
    audioDestinationStream.stream.getAudioTracks()[0]
  );
  audioElement.srcObject = outputMediaStream;

  // --- Waveform drawing ---------------------------------------------------
  const canvas = document.createElement("canvas");
  document.body.appendChild(canvas);
  const canvasContext = canvas.getContext("2d");
  const width = canvas.width;   // canvas size never set: defaults to 300
  const height = canvas.height; // ... and 150

  // Time-domain data: one byte per sample (0-255, 128 = silence),
  // fftSize samples per frame.
  const bufferLength = analyser.fftSize;
  const dataArray = new Uint8Array(bufferLength);

  canvasContext.clearRect(0, 0, width, height);

  // Declared here (was an implicit global before); the id would let a
  // caller stop the animation via cancelAnimationFrame(drawVisual).
  let drawVisual;
  const draw = function () {
    drawVisual = requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);
    canvasContext.clearRect(0, 0, width, height);
    canvasContext.lineWidth = 2;
    canvasContext.strokeStyle = "rgb(0, 0, 0)";
    canvasContext.beginPath();
    // One horizontal slice per sample; y centred on the canvas midline.
    const sliceWidth = width / bufferLength;
    let x = 0;
    for (let i = 0; i < bufferLength; i++) {
      const v = dataArray[i] / 128.0; // normalize: 1.0 == centre line
      const y = (v * height) / 2;
      if (i === 0) {
        canvasContext.moveTo(x, y);
      } else {
        canvasContext.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasContext.lineTo(width, height / 2);
    canvasContext.stroke();
  };
  draw();

  // TODO: recording could be added here with
  // new MediaRecorder(audioDestinationStream.stream) and a Blob of the
  // "dataavailable" chunks.
});