MediaStream Capture Canvas and Audio Simultaneously
Solution 1
Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
Yes, you can do it using the MediaStream.addTrack()
method.
Note that until this bug is fixed, Firefox will only record the tracks present in the initial stream passed to the Recorder.
The OP already knows how to get all of these, but here is a reminder for future readers:
To get a videoStream track from the canvas, you can call
canvas.captureStream(framerate)
method. To get an audio streamTrack from a video element, you can use the WebAudio API and its
createMediaStreamDestination
method. This will return a MediaStreamDestination node (dest
) containing our audioStream. You'll then have to connect a MediaElementSource, created from your video element, to this dest
. If you need to add more audio tracks to this stream, you should connect all these sources to dest
.
Now that we've got two streams, one for the canvas video and one for the audio, we can use canvasStream.addTrack(audioStream.getAudioTracks()[0])
just before initializing our new MediaRecorder(canvasStream)
.
Here is a complete example, that will work only in chrome now, and probably soon in Firefox, when they will have fixed the bug :
// Shared state for the recording demo (deliberately `var` globals,
// matching the original flat-script structure).
var cStream;      // canvas capture stream (carries the video track)
var aStream;      // WebAudio destination stream (carries the audio track)
var vid;          // the source <video> element created by loadVideo()
var recorder;     // active MediaRecorder instance
var analyser;     // AnalyserNode feeding the waveform drawing
var dataArray;    // time-domain sample buffer for the analyser
var bufferLength; // analyser.frequencyBinCount
var chunks = [];  // recorded Blob chunks collected by saveChunks()
// Click handler for the record button (`this` is the button element):
// captures the canvas at 30 FPS, mixes in the audio track produced by
// initAudioStream(), and starts recording the combined stream.
function clickHandler() {
  this.textContent = 'stop recording';
  cStream = canvas.captureStream(30);
  // Add the WebAudio-derived audio track so the recording has sound.
  cStream.addTrack(aStream.getAudioTracks()[0]);
  recorder = new MediaRecorder(cStream);
  // Attach handlers before start() so no dataavailable event can be
  // missed between start and handler assignment.
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  recorder.start();
  // The same button now stops the recording.
  this.onclick = stopRecording;
}
// MediaRecorder "stop" handler: assembles the recorded chunks into a
// Blob and shows it in a new <video> element inserted before the canvas.
// If nothing was recorded, a short notice is inserted instead.
function exportStream(e) {
  if (chunks.length) {
    var blob = new Blob(chunks);
    var vidURL = URL.createObjectURL(blob);
    // Local playback element — named distinctly so it no longer shadows
    // the global `vid` (the source video), which this handler must not touch.
    var playback = document.createElement('video');
    playback.controls = true;
    playback.src = vidURL;
    // Bug fix: the media event is "ended" (handler property `onended`),
    // not "onend" — the original handler never fired, so the object URL
    // was never revoked (memory leak).
    playback.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(playback, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}
// MediaRecorder "dataavailable" handler: keep only non-empty chunks.
function saveChunks(e) {
  if (e.data.size > 0) {
    chunks.push(e.data);
  }
}
// "stop recording" click handler (bound to the record button by
// clickHandler). Pauses the source video, removes the button from the
// page, then stops the recorder — which fires onstop -> exportStream.
function stopRecording() {
  var button = this;
  vid.pause();
  button.parentNode.removeChild(button);
  recorder.stop();
}
// "canplay" handler for the source video (`this` is the video element):
// builds the WebAudio graph that exposes the video's audio as a
// MediaStream (aStream), sets up the analyser for the canvas waveform,
// starts playback, and enables the record button.
function initAudioStream(evt) {
  // canplay can fire more than once (e.g. after a seek); building the
  // graph again would throw, because a media element may only ever be
  // wrapped in a single MediaElementSource. Detach the handler first.
  this.oncanplay = null;
  var audioCtx = new AudioContext();
  // Create a stream from our AudioContext.
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream;
  // Connect our video element's output to the stream.
  var sourceNode = audioCtx.createMediaElementSource(this);
  sourceNode.connect(dest);
  // Start the video.
  this.play();
  // Just for the fancy canvas drawings.
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048;
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);
  // Also route the audio to the speakers/headphones.
  sourceNode.connect(audioCtx.destination);
  startCanvasAnim();
  rec.onclick = clickHandler;
  rec.disabled = false;
}
// Creates the (off-DOM) source video element and starts loading it;
// once it can play, initAudioStream wires up the audio graph.
var loadVideo = function() {
  var movieURL = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
  vid = document.createElement('video');
  // Required so the cross-origin media can feed the WebAudio graph.
  vid.crossOrigin = 'anonymous';
  // Attach the handler before setting src so "canplay" cannot be missed.
  vid.oncanplay = initAudioStream;
  vid.src = movieURL;
};
// Draws a live oscilloscope of the analyser's time-domain data onto the
// canvas (adapted from the MDN AnalyserNode examples). Runs forever via
// requestAnimationFrame; this canvas surface is what cStream records.
function startCanvasAnim() {
  // from MDN https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';
  var draw = function() {
    // Schedule the next frame; the returned id was stored in an unused
    // local (`drawVisual`) before — dropped, the animation never stops.
    requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      // Samples are unsigned bytes centered on 128; normalize around 1.0
      // so the trace sits in the middle of the canvas.
      var v = dataArray[i] / 128.0;
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  };
  draw();
}
// Kick everything off: create the source video element and start loading.
loadVideo();
<!-- Waveform display; its surface is what canvas.captureStream() records -->
<canvas id="canvas" width="500" height="200"></canvas>
<!-- Enabled (and wired to clickHandler) once the video can play -->
<button id="rec" disabled>record</button>
Ps : Since FF team seems to take some time to fix the bug, here is a quick fix to make it work on FF too.
You can also mix two tracks by using new MediaStream([track1, track2])
.
However, chrome currently prefixes this constructor, but since it does support addTrack
, it's not really needed, and we can come with something as ugly as
// Feature-detect the MediaStream constructor (available in Firefox):
// build a fresh stream from the canvas video track plus the WebAudio
// audio track. Otherwise fall back to cStream, which already received
// the audio track via addTrack() in clickHandler.
var mixedStream = 'MediaStream' in window ?
new MediaStream([cStream.getVideoTracks()[0], aStream.getAudioTracks()[0]]) :
cStream;
recorder = new MediaRecorder(mixedStream);
Working fiddle for both FF and chrome.
Solution 2
Kaiido's demo is brilliant. For those just looking for the tl;dr code to add an audio stream to their existing canvas stream:
// tl;dr — route an <audio>/<video> element through a WebAudio graph and
// add the resulting audio track to an existing canvas stream.
// Fix: the original `let videoOrAudioElement = /* ... */;` had no
// initializer expression and was a syntax error; use a real placeholder.
let videoOrAudioElement = document.querySelector('video'); // your audio source element
// Get the audio track:
let ctx = new AudioContext();
let dest = ctx.createMediaStreamDestination();
let sourceNode = ctx.createMediaElementSource(videoOrAudioElement);
sourceNode.connect(dest);
// Keep the audio audible through the speakers too.
sourceNode.connect(ctx.destination);
let audioTrack = dest.stream.getAudioTracks()[0];
// Add it to your canvas stream:
canvasStream.addTrack(audioTrack);
// Use your canvas stream like you would normally:
let recorder = new MediaRecorder(canvasStream);
// ...
Related videos on Youtube
dsing7
Updated on June 14, 2022. Comments
-
dsing7 about 2 years
I'm working on a project in which I'd like to:
- Load a video js and display it on the canvas.
- Use filters to alter the appearance of the canvas (and therefore the video).
- Use the MediaStream captureStream() method and a MediaRecorder object to record the surface of the canvas and the audio of the original video.
- Play the stream of both the canvas and the audio in an HTML video element.
I've been able to display the canvas recording in a video element by tweaking this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/
That said, I can't figure out how to record the video's audio alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?
According to the MediaStream API's specs there should theoretically be some way to accomplish this: https://w3c.github.io/mediacapture-main/#introduction
"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. The MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."
-
Kaiido almost 8 yearssince the accepted answer is not specifically about p5.js, do you mind if I remove it from the title and the body of your question?
-
dsing7 almost 8 yearsNot at all, go for it.
-
dsing7 almost 8 yearsThank you for the quick response and the code example! Good to know that mixed canvas/audio streams will be feasible (at least in Firefox) once the bug gets addressed.
-
Kaiido almost 8 years@dsing7, actually chrome has no bug, I made a typo (forgot to connect the sourceNode to the MediaStreamDestination...) Fixed the code, which now works in Chrome.
-
dsing7 almost 8 yearsGlad to hear Chrome works as expected. Thanks for the heads up!
-
dsing7 almost 8 yearshow would I go about adding a second unique audio element to my MediaStream? This would be audio from an mp3, rather than a video. Should I create an additional media stream destination, destination stream, and media element source; connect them, and then add the new destination stream to my MediaStream with addTrack()? I just tried this, and the exported video doesn't have the audio from my mp3.
-
dsing7 almost 8 yearsHere are the relevant sections of my code: pastebin.com/f4RwH7MD
-
Kaiido almost 8 years@dsing7 I didn't tried it yet, but I would connect both sourceNodes to the same mediaStreamDestination.
-
dsing7 almost 8 yearsYup, looks like connecting both to the same mediaStreamDestination puts the mp3 audio in my final video. Thanks again!
-
guest271314 over 7 years@Kaiido
Uncaught TypeError: Cannot read property 'getAudioTracks' of undefined
is logged at the console
-
Kaiido over 7 years@guest271314, which browser ? From the snippet ? Are you somehow clicking the button before the video has loaded ?
-
guest271314 over 7 years@Kaiido "Are you somehow clicking the button before the video has loaded ?" That was it. Clicking stop logged
Uncaught InvalidStateError: Failed to execute 'stop' on 'MediaRecorder': The MediaRecorder's state is 'inactive'."
. Perhaps set the button to disabled
after button is clicked once? -
Kaiido over 7 yearsor perhaps not since it's just an example on how to record the canvas along with audio and not a purely functional app ? There would be a lot of feature detection to do too, to avoid a lot of errors that would raise from older browsers, but the point of the question is not really there, it's not a tutorial just a minimal example on how to do it. I don't think this would add anything to the answer.
-
IvRRimUm over 7 yearsThank you so much. probably saved me days of testing.
-
Admin almost 6 years@Kaiido, you're making the community worse by downvoting content and not leaving a reason. I'm made it a community wiki, so you can improve it if needed. This code snippet prevents people from having to parse through your full demo when they just wanted the raw logic. I've made it a community wiki so you can improve it if your downvote was motivated by an error in the answer.
-
Aeramor over 3 yearsThis is old but you don't know how much this helped me. Thank you!
-
savram over 2 yearsIs it possible to record the canvas with audio from the microphone and another audio from the webpage? Is it possible to have 2 or more audios in the same recording? I have a game that I want to record and use the microphone, but also record any other audios that are playing while I'm recording.
-
savram over 2 years@Kaiido ? Please help ;_;
-
savram over 2 yearsIs it possible to record the canvas with audio from the microphone and another audio from the webpage? Is it possible to have 2 or more audios in the same recording? I have a game that I want to record and use the microphone, but also record any other audios that are playing while I'm recording.
-
Kaiido over 2 years@savram sure just
.connect()
whatever source you have to the final mediaStreamDestination. -
savram over 2 years@Kaiido sorry for bothering you, I'd create a new question if my account wasn't blocked. So this will work even if I have like a background music, and other sounds playing? If I call .connect it will work just like that, without disrupting the sounds of the game? I guess I could just search for all audio nodes in the DOM?
-
Kaiido over 2 yearsYes you have to find all the sources and connect them to the mediastreamdestination node. To keep them playing you also connect them to the audiocontext's destination.
-
savram over 2 yearsWhen you say connect them to mediastreamdestination node, you mean this line: var dest = audioCtx.createMediaStreamDestination(); ? And what else should I connect them to, what is the audiocontext's destination? Could you give me a code snippet? Thanks!
-
Kaiido over 2 yearsNo I can't. This is not a helpdesk and I have many other things to do than helping anyone in their own projects. That's not how this website works. Please do search on your own, improve whatever granted you a question ban and come back here with your own questions if you really face a clear and focused programming issue.
-
savram over 2 yearsThank you for your time. Improving existing questions is impossible though. My only hope is when I get out of the ban in six months for 1 question, my next question has many upvotes.