MediaStream Capture Canvas and Audio at the same time

I am working on a project in which I want:

  • Load the video in JavaScript and display it on a canvas.
  • Use filters to change the look of the canvas (and therefore the video).
  • Use the canvas captureStream() method and a MediaRecorder object to record the canvas surface together with the audio of the source video.
  • Play a stream of both canvas and audio in an HTML video element.

I was able to display the canvas recording in a video element by adapting this WebRTC demo code: https://webrtc.github.io/samples/src/content/capture/canvas-record/

However, I cannot figure out how to record the video's audio track alongside the canvas. Is it possible to create a MediaStream containing MediaStreamTrack instances from two different sources/elements?

According to the MediaStream API specification, there should theoretically be some way to accomplish this: https://w3c.github.io/mediacapture-main/#introduction

"The two main components in the MediaStream API are the MediaStreamTrack and MediaStream interfaces. A MediaStreamTrack object represents media of a single type that originates from one media source in the User Agent, e.g. video produced by a web camera. A MediaStream is used to group several MediaStreamTrack objects into one unit that can be recorded or rendered in a media element."

+7
javascript canvas mediarecorder mediastream
source share
1 answer

Is it possible to create a MediaStream containing instances of MediaStreamTrack from two different sources / elements?

Yes, you can do this using the MediaStream.addTrack() method.

However, Firefox will only record the stream's initial tracks in the MediaRecorder until this bug is fixed.


OP already knows how to get all this, but there is a reminder for future readers:

  • To get the videoStream track from the canvas, you can call the canvas.captureStream(framerate) method.

  • To get the audio stream from a video element, you can use the WebAudio API and the createMediaStreamDestination method. This will return a MediaStreamDestination node ( dest ) containing our audio stream. Then you need to connect the MediaElementSource created from your video element to this dest . If you need to add additional audio tracks to this stream, you must connect all these sources to dest .

Now that we have two streams — one for the canvas video and one for the audio — we can use canvasStream.addTrack(audioStream.getAudioTracks()[0]) just before initializing our new MediaRecorder(canvasStream).

Here is a complete example that currently works only in Chrome, and will probably work in Firefox once they fix the bug:

 // Globals shared between the recording and drawing helpers.
// NOTE(review): `canvas` and `rec` are assumed to be the elements with those
// ids in the page markup (resolved as window globals) — confirm in the host page.
var cStream, aStream, vid, recorder, analyser, dataArray, bufferLength, chunks = [];

// Button handler: start capturing canvas video + source audio, toggle to "stop".
function clickHandler() {
  this.textContent = 'stop recording';
  chunks = []; // discard any data left over from a previous recording
  cStream = canvas.captureStream(30); // 30 fps video track from the canvas
  // Merge the audio track from the WebAudio destination into the canvas stream.
  cStream.addTrack(aStream.getAudioTracks()[0]);
  recorder = new MediaRecorder(cStream);
  recorder.start();
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportStream;
  this.onclick = stopRecording;
}

// Once recording stops, turn the collected chunks into a playable <video>.
function exportStream(e) {
  if (chunks.length) {
    var blob = new Blob(chunks);
    var vidURL = URL.createObjectURL(blob);
    // Use a distinct local name: the original shadowed the global `vid`
    // (the source video) with the playback element.
    var playback = document.createElement('video');
    playback.controls = true;
    playback.src = vidURL;
    // Fixed: the event is `ended`, not `end` — with `onend` the handler never
    // fired and the object URL was never revoked (blob leak).
    playback.onended = function() {
      URL.revokeObjectURL(vidURL);
    };
    document.body.insertBefore(playback, canvas);
  } else {
    document.body.insertBefore(document.createTextNode('no data saved'), canvas);
  }
}

// Accumulate every non-empty data blob the recorder produces.
function saveChunks(e) {
  e.data.size && chunks.push(e.data);
}

// Stop playback, remove the button, and finalize the recording.
function stopRecording() {
  vid.pause();
  this.parentNode.removeChild(this);
  recorder.stop();
}

// Wire the video element's audio into a recordable MediaStream via WebAudio.
function initAudioStream(evt) {
  // Guard: `canplay` can fire again (e.g. after a seek); creating a second
  // MediaElementSource from the same element would throw an InvalidStateError.
  this.oncanplay = null;

  var audioCtx = new AudioContext();
  // Create a stream from our AudioContext.
  var dest = audioCtx.createMediaStreamDestination();
  aStream = dest.stream;
  // Connect our video element's output to the stream.
  var sourceNode = audioCtx.createMediaElementSource(this);
  sourceNode.connect(dest);
  // Start the video.
  this.play();

  // Just for the fancy canvas drawings.
  analyser = audioCtx.createAnalyser();
  sourceNode.connect(analyser);
  analyser.fftSize = 2048;
  bufferLength = analyser.frequencyBinCount;
  dataArray = new Uint8Array(bufferLength);
  analyser.getByteTimeDomainData(dataArray);

  // Output to our speakers/headphones as well.
  sourceNode.connect(audioCtx.destination);

  startCanvasAnim();

  rec.onclick = clickHandler;
  rec.disabled = false;
}

// Create the source video element and kick everything off once it can play.
var loadVideo = function() {
  vid = document.createElement('video');
  vid.crossOrigin = 'anonymous'; // needed so the canvas/recorder are not tainted
  vid.oncanplay = initAudioStream;
  vid.src = 'https://dl.dropboxusercontent.com/s/bch2j17v6ny4ako/movie720p.mp4';
};

// Oscilloscope-style waveform drawing, adapted from MDN:
// https://developer.mozilla.org/en/docs/Web/API/AnalyserNode#Examples
function startCanvasAnim() {
  var canvasCtx = canvas.getContext('2d');
  canvasCtx.fillStyle = 'rgb(200, 200, 200)';
  canvasCtx.lineWidth = 2;
  canvasCtx.strokeStyle = 'rgb(0, 0, 0)';

  var draw = function() {
    var drawVisual = requestAnimationFrame(draw);
    analyser.getByteTimeDomainData(dataArray);
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.beginPath();
    var sliceWidth = canvas.width * 1.0 / bufferLength;
    var x = 0;
    for (var i = 0; i < bufferLength; i++) {
      var v = dataArray[i] / 128.0;
      var y = v * canvas.height / 2;
      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }
      x += sliceWidth;
    }
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  };
  draw();
}

loadVideo();
 <!-- Recording target canvas, and the record/stop toggle (enabled once audio is ready) --> <canvas id="canvas" width="500" height="200"></canvas> <button id="rec" disabled>record</button> 

PS: since the Firefox team seems to be taking some time to fix the bug, here is a quick workaround to make it work in FF as well.

You can also mix two tracks using the new MediaStream([track1, track2]) constructor.
However, Chrome currently prefixes this constructor; but since Chrome supports addTrack(), the constructor isn't really needed there, and we can come up with something as ugly as

 // Prefer combining explicit tracks when the MediaStream() constructor exists.
var mixedStream;
if ('MediaStream' in window) {
  mixedStream = new MediaStream([cStream.getVideoTracks()[0], aStream.getAudioTracks()[0]]);
} else {
  // Fallback: cStream already carries the audio track (added via addTrack()).
  mixedStream = cStream;
}
recorder = new MediaRecorder(mixedStream);

Working fiddle for FF and Chrome.

+11
source share

All Articles