使用captureStream和mediaRecorder进行画布录制 [英] Canvas recording using captureStream and mediaRecorder
问题描述
我怎样才能记录来自多个画布的流?
即,当我将一个画布更改为其他画布时,它必须将活动画布继续记录到第一个画布。
我这样做了:
stream = canvas.captureStream();
mediaRecorder = new MediaRecorder(stream, options);
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start(10);
function handleDataAvailable(event) {
recordedBlobs.push(event.data);
}
但是当添加另一个流时,只记录第一部分。我推送记录的数据到一个全局数组中。
在当前的实现中,不能切换录制的MediaRecorder流的音轨。
当您尝试这样做时,Firefox将您引导至控制台
MediaRecorder目前不支持录制多个相同类型的曲目。
而Chrome会保持沉默,并记录黑色框架,而不是第二轨道。
var canvases = Array.prototype.slice.call(document.querySelectorAll('canvas')),
  recordingStream,
  current = 0,
  chunks = [],
  recorder,
  switchInterval;

function startRecording() {
  // 首先收集两个画布流并提取视频轨道
  let streams = canvases.map((c) => {
    return c.captureStream(30)
  });
  let tracks = streams.map((s) => {
    return s.getVideoTracks()[0]
  });
  // 创建一个带有两个轨道的新 MediaStream
  // 我们不使用 addTrack，因为 https://bugzilla.mozilla.org/show_bug.cgi?id=1296531
  recordingStream = 'MediaStream' in window && new MediaStream(tracks) || new webkitMediaStream(tracks);
  // 初始化 MediaRecorder
  recorder = new MediaRecorder(recordingStream);
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportVideo;
  recorder.onerror = (e) => {
    console.log(e.name)
  };
  recorder.start();
  stopRec.disabled = false;
  // 每 200ms 切换一次要录制的画布
  switchInterval = setInterval(switchStream, 200);
}

// 轮流静音其中一条轨道，再静音另一条
function switchStream() {
  current = +!current;
  var tracks = recordingStream.getVideoTracks();
  tracks[current].enabled = true;
  // 注释掉，因为 FF 似乎还不支持 canvasTrack 的这个方法
  // 即使在 chrome 中存在也无法工作
  // tracks[current].requestFrame();
  tracks[+!current].enabled = false;
}

function saveChunks(evt) {
  // 存储我们视频的块
  if (evt.data.size > 0) {
    chunks.push(evt.data);
  }
}

stopRec.onclick = function stopRecording() {
  if (recorder.state !== 'recording') {
    this.disabled = true;
    return;
  }
  // 停止一切
  recorder.stop(); // 这将触发 exportVideo
  clearInterval(switchInterval);
  stopCanvasAnim();
  a.style.display = b.style.display = 'none';
  this.parentNode.innerHTML = "";
}

function exportVideo() {
  // 我们已经得到了一切
  vid.src = URL.createObjectURL(new Blob(chunks));
}

var stopCanvasAnim = (function initCanvasDrawing() {
  // 一些花哨的绘图
  var aCtx = canvases[0].getContext('2d'),
    bCtx = canvases[1].getContext('2d');
  var objects = [],
    w = canvases[0].width,
    h = canvases[0].height;
  aCtx.fillStyle = bCtx.fillStyle = 'ivory';
  for (var i = 0; i < 100; i++) {
    objects.push({
      angle: Math.random() * 360,
      x: 100 + (Math.random() * w / 2),
      y: 100 + (Math.random() * h / 2),
      radius: 10 + (Math.random() * 40),
      speed: 1 + Math.random() * 20
    });
  }
  var stop = false;
  var draw = function() {
    aCtx.fillRect(0, 0, w, h);
    bCtx.fillRect(0, 0, w, h);
    for (var n = 0; n < 100; n++) {
      var entity = objects[n],
        velY = Math.cos(entity.angle * Math.PI / 180) * entity.speed,
        velX = Math.sin(entity.angle * Math.PI / 180) * entity.speed;
      entity.x += velX;
      entity.y -= velY;
      aCtx.drawImage(imgA, entity.x, entity.y, entity.radius, entity.radius);
      bCtx.drawImage(imgB, entity.x, entity.y, entity.radius, entity.radius);
      entity.angle++;
    }
    if (!stop) {
      requestAnimationFrame(draw);
    }
  };
  var imgA = new Image();
  var imgB = new Image();
  imgA.onload = function() {
    draw();
    startRecording();
  };
  imgA.crossOrigin = imgB.crossOrigin = 'anonymous';
  imgA.src = "https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png";
  imgB.src = "https://dl.dropboxusercontent.com/s/rumlhyme6s5f8pt/ABC.png";
  return function() {
    stop = true;
  };
})();
<p>
  <button id="stopRec" disabled>stop recording</button>
</p>
<canvas id="a"></canvas>
<canvas id="b"></canvas>
<video id="vid" controls></video>
请注意,目前在w3c github项目mediacapture-record上有一个未解决的问题关于这一点。
但是,这个问题有一个简单的解决方法:
- 使用另一个隐藏的离屏（offscreen）画布（该 Chrome bug 已在最新的 Chrome 58 canary 中修复），仅用于录制器；
- 在其上绘制所需画布的帧。
这样,没问题;-)
也可以使用相同的解决方法，在同一个 MediaRecorder 上保存不同的视频。
var canvases = document.querySelectorAll('canvas'),
  recordingCtx,
  current = 0,
  chunks = [],
  recorder,
  switchInterval;

// 在第三个画布上绘制我们其中一个画布
function recordingAnim() {
  recordingCtx.drawImage(canvases[current], 0, 0);
  // 如果录制器已停止，则停止动画
  if (!recorder || recorder.state === 'recording') {
    requestAnimationFrame(recordingAnim);
  }
}

function startRecording() {
  var recordingCanvas = canvases[0].cloneNode();
  recordingCtx = recordingCanvas.getContext('2d');
  recordingCanvas.id = "";
  // chrome 强制我们把画布显示在文档中才能录制，
  // 这个 bug 已在 chrome 58.0.3014.0 中修复
  recordingCtx.canvas.style.height = 0;
  document.body.appendChild(recordingCtx.canvas);
  // 在我们的录制画布上绘制其中一个画布
  recordingAnim();
  // 初始化 MediaRecorder
  recorder = new MediaRecorder(recordingCtx.canvas.captureStream(30));
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportVideo;
  recorder.start();
  stopRec.onclick = stopRecording;
  // 每 200ms 切换一次要录制的画布
  switchInterval = setInterval(switchStream, 200);
}

function saveChunks(evt) {
  // 存储我们最终视频的块
  if (evt.data.size > 0) {
    chunks.push(evt.data);
  }
}

function stopRecording() {
  // 停止一切，这将触发 recorder.onstop
  recorder.stop();
  clearInterval(switchInterval);
  stopCanvasAnim();
  a.style.display = b.style.display = 'none';
  this.parentNode.innerHTML = "";
  recordingCtx.canvas.parentNode.removeChild(recordingCtx.canvas);
}

// 当我们得到所有内容时
function exportVideo() {
  vid.src = URL.createObjectURL(new Blob(chunks));
}

// 在 1 和 0 之间切换
function switchStream() {
  current = +!current;
}

// 一些花哨的绘图
var stopCanvasAnim = (function initCanvasDrawing() {
  var aCtx = canvases[0].getContext('2d'),
    bCtx = canvases[1].getContext('2d');
  var objects = [],
    w = canvases[0].width,
    h = canvases[0].height;
  aCtx.fillStyle = bCtx.fillStyle = 'ivory';
  // 取自 http://stackoverflow.com/a/23486828/3702797
  for (var i = 0; i < 100; i++) {
    objects.push({
      angle: Math.random() * 360,
      x: 100 + (Math.random() * w / 2),
      y: 100 + (Math.random() * h / 2),
      radius: 10 + (Math.random() * 40),
      speed: 1 + Math.random() * 20
    });
  }
  var stop = false;
  var draw = function() {
    aCtx.fillRect(0, 0, w, h);
    bCtx.fillRect(0, 0, w, h);
    for (var n = 0; n < 100; n++) {
      var entity = objects[n],
        velY = Math.cos(entity.angle * Math.PI / 180) * entity.speed,
        velX = Math.sin(entity.angle * Math.PI / 180) * entity.speed;
      entity.x += velX;
      entity.y -= velY;
      aCtx.drawImage(imgA, entity.x, entity.y, entity.radius, entity.radius);
      bCtx.drawImage(imgB, entity.x, entity.y, entity.radius, entity.radius);
      entity.angle++;
    }
    if (!stop) {
      requestAnimationFrame(draw);
    }
  };
  var imgA = new Image();
  var imgB = new Image();
  imgA.onload = function() {
    draw();
    startRecording();
  };
  imgA.crossOrigin = imgB.crossOrigin = 'anonymous';
  imgA.src = "https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png";
  imgB.src = "https://dl.dropboxusercontent.com/s/rumlhyme6s5f8pt/ABC.png";
  return function() {
    stop = true;
  };
})();
<p>
  <button id="stopRec">stop recording</button>
</p>
<canvas id="a"></canvas>
<canvas id="b"></canvas>
<video id="vid" controls></video>
How can I record streams from more than one canvas? I.e., when I switch from one canvas to the other, it has to keep recording the active canvas, continuing from the first.
I have done like this:
stream = canvas.captureStream();
mediaRecorder = new MediaRecorder(stream, options);
mediaRecorder.ondataavailable = handleDataAvailable;
mediaRecorder.start(10);
function handleDataAvailable(event) {
recordedBlobs.push(event.data);
}
But when adding another stream, only the first part is recorded. I'm pushing the recorded data into a global array.
In the current implementations, you can't switch the recorded tracks of a MediaRecorder's stream.
When you try to do so, Firefox throws you in the console that
MediaRecorder does not support recording multiple tracks of the same type at this time.
while Chrome keeps silent and records black frames instead of the second track...
// Globals shared by the two-track recording demo.
var canvases = Array.prototype.slice.call(document.querySelectorAll('canvas')); // both source canvases
var recordingStream; // MediaStream combining one video track per canvas
var current = 0; // index of the track currently enabled (0 or 1)
var chunks = []; // recorded data chunks, later merged into one Blob
var recorder; // the MediaRecorder instance
var switchInterval; // id of the 200ms track-switching interval
// Start recording both canvases on a single MediaRecorder: one video
// track per canvas, toggled every 200ms by switchStream().
function startRecording() {
  // Capture each canvas at 30fps and keep only its video track.
  const tracks = canvases.map((canvas) => canvas.captureStream(30).getVideoTracks()[0]);
  // Bundle both tracks into one MediaStream.
  // addTrack is avoided because of https://bugzilla.mozilla.org/show_bug.cgi?id=1296531
  recordingStream = ('MediaStream' in window) ?
    new MediaStream(tracks) :
    new webkitMediaStream(tracks);
  // Set up the MediaRecorder over the combined stream.
  recorder = new MediaRecorder(recordingStream);
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportVideo;
  recorder.onerror = (e) => {
    console.log(e.name)
  };
  recorder.start();
  stopRec.disabled = false;
  // Flip the enabled track every 200ms.
  switchInterval = setInterval(switchStream, 200);
}
// Toggle which of the two video tracks is live: enable the newly
// selected one, then mute the other.
function switchStream() {
  current = +!current;
  const videoTracks = recordingStream.getVideoTracks();
  videoTracks[current].enabled = true;
  // videoTracks[current].requestFrame() is intentionally left out:
  // FF doesn't support it on canvas tracks yet, and it made no
  // difference in Chrome either.
  videoTracks[+!current].enabled = false;
}
// dataavailable handler: accumulate every non-empty recorded chunk.
function saveChunks(evt) {
  if (evt.data.size === 0) {
    return; // ignore empty events
  }
  chunks.push(evt.data);
}
// Stop button: tear everything down; recorder.stop() fires 'stop',
// which in turn runs exportVideo.
stopRec.onclick = function stopRecording() {
  // Ignore clicks once recording has already ended.
  if (recorder.state !== 'recording') {
    this.disabled = true;
    return;
  }
  recorder.stop(); // triggers exportVideo via onstop
  clearInterval(switchInterval); // stop flipping tracks
  stopCanvasAnim(); // freeze the drawings
  a.style.display = 'none';
  b.style.display = 'none';
  this.parentNode.innerHTML = "";
}
// onstop handler: assemble all chunks into one Blob and show it in the <video>.
function exportVideo() {
  const fullRecording = new Blob(chunks);
  vid.src = URL.createObjectURL(fullRecording);
}
// Bouncing-sprites animation driving both canvases; returns a function
// that halts the animation loop.
var stopCanvasAnim = (function initCanvasDrawing() {
  var ctxA = canvases[0].getContext('2d');
  var ctxB = canvases[1].getContext('2d');
  var w = canvases[0].width;
  var h = canvases[0].height;
  var sprites = [];
  ctxA.fillStyle = ctxB.fillStyle = 'ivory';
  // Seed 100 sprites with random position, heading, size and speed.
  for (var i = 0; i < 100; i++) {
    sprites.push({
      angle: Math.random() * 360,
      x: 100 + (Math.random() * w / 2),
      y: 100 + (Math.random() * h / 2),
      radius: 10 + (Math.random() * 40),
      speed: 1 + Math.random() * 20
    });
  }
  var halted = false;
  function draw() {
    ctxA.fillRect(0, 0, w, h);
    ctxB.fillRect(0, 0, w, h);
    sprites.forEach(function(sprite) {
      var rad = sprite.angle * Math.PI / 180;
      sprite.x += Math.sin(rad) * sprite.speed;
      sprite.y -= Math.cos(rad) * sprite.speed;
      ctxA.drawImage(imgA, sprite.x, sprite.y, sprite.radius, sprite.radius);
      ctxB.drawImage(imgB, sprite.x, sprite.y, sprite.radius, sprite.radius);
      sprite.angle++;
    });
    if (!halted) {
      requestAnimationFrame(draw);
    }
  }
  var imgA = new Image();
  var imgB = new Image();
  // Kick off both the animation and the recording once the first image loads.
  imgA.onload = function() {
    draw();
    startRecording();
  };
  imgA.crossOrigin = imgB.crossOrigin = 'anonymous';
  imgA.src = "https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png";
  imgB.src = "https://dl.dropboxusercontent.com/s/rumlhyme6s5f8pt/ABC.png";
  return function() {
    halted = true;
  };
})();
<p>
<button id="stopRec" disabled>stop recording</button>
</p>
<canvas id="a"></canvas>
<canvas id="b"></canvas>
<video id="vid" controls></video>
Note that there is currently an open issue on the w3c github project mediacapture-record about this.
But, there is a simple workaround to this issue :
- use another offscreen [hidden] canvas (the chrome bug is now fixed in latest 58 canary), only used for the recorder,
- draw the frames of the wanted canvas on it.
This way, no problem ;-)
The same workaround could also be used to save different videos on the same MediaRecorder.
// Globals shared by the single-track (offscreen canvas) workaround.
var canvases = document.querySelectorAll('canvas'); // the two visible source canvases
var recordingCtx; // 2d context of the hidden recording canvas
var current = 0; // which source canvas is currently mirrored (0 or 1)
var chunks = []; // recorded data chunks
var recorder; // the MediaRecorder instance
var switchInterval; // id of the 200ms canvas-switching interval
// Continuously mirror the currently selected source canvas onto the
// hidden recording canvas; the loop ends once the recorder has stopped.
function recordingAnim() {
  recordingCtx.drawImage(canvases[current], 0, 0);
  var keepGoing = !recorder || recorder.state === 'recording';
  if (keepGoing) {
    requestAnimationFrame(recordingAnim);
  }
}
// Start recording: create a hidden canvas that mirrors whichever source
// canvas is active, and record only that single stream.
function startRecording() {
  // Clone a source canvas so width/height match, then anonymize it.
  var recordingCanvas = canvases[0].cloneNode();
  recordingCanvas.id = "";
  recordingCtx = recordingCanvas.getContext('2d');
  // Chrome (before 58.0.3014.0) only records canvases attached to the
  // document, so append it with zero height to keep it out of sight.
  recordingCanvas.style.height = 0;
  document.body.appendChild(recordingCanvas);
  // Begin mirroring the active source canvas.
  recordingAnim();
  // Record the hidden canvas's single 30fps stream.
  recorder = new MediaRecorder(recordingCanvas.captureStream(30));
  recorder.ondataavailable = saveChunks;
  recorder.onstop = exportVideo;
  recorder.start();
  stopRec.onclick = stopRecording;
  // Alternate the mirrored canvas every 200ms.
  switchInterval = setInterval(switchStream, 200);
}
// dataavailable handler: keep every non-empty chunk of the final video.
function saveChunks(evt) {
  if (evt.data.size === 0) {
    return; // nothing recorded in this slice
  }
  chunks.push(evt.data);
}
// Tear everything down; recorder.stop() fires 'stop' -> exportVideo.
function stopRecording() {
  recorder.stop();
  clearInterval(switchInterval); // stop alternating the mirrored canvas
  stopCanvasAnim(); // freeze the drawings
  a.style.display = 'none';
  b.style.display = 'none';
  this.parentNode.innerHTML = "";
  // Drop the hidden recording canvas from the document.
  var recCanvas = recordingCtx.canvas;
  recCanvas.parentNode.removeChild(recCanvas);
}
// onstop handler: build the final Blob and load it into the <video>.
function exportVideo() {
  const recording = new Blob(chunks);
  vid.src = URL.createObjectURL(recording);
}
// Flip `current` between 0 and 1 to select the other source canvas.
function switchStream() {
  current = current === 0 ? 1 : 0;
}
// Bouncing-sprites animation on both source canvases; returns a
// function that halts the animation loop.
var stopCanvasAnim = (function initCanvasDrawing() {
  var ctxA = canvases[0].getContext('2d');
  var ctxB = canvases[1].getContext('2d');
  var w = canvases[0].width;
  var h = canvases[0].height;
  var sprites = [];
  ctxA.fillStyle = ctxB.fillStyle = 'ivory';
  // taken from http://stackoverflow.com/a/23486828/3702797
  for (var i = 0; i < 100; i++) {
    sprites.push({
      angle: Math.random() * 360,
      x: 100 + (Math.random() * w / 2),
      y: 100 + (Math.random() * h / 2),
      radius: 10 + (Math.random() * 40),
      speed: 1 + Math.random() * 20
    });
  }
  var halted = false;
  function draw() {
    ctxA.fillRect(0, 0, w, h);
    ctxB.fillRect(0, 0, w, h);
    sprites.forEach(function(sprite) {
      var rad = sprite.angle * Math.PI / 180;
      sprite.x += Math.sin(rad) * sprite.speed;
      sprite.y -= Math.cos(rad) * sprite.speed;
      ctxA.drawImage(imgA, sprite.x, sprite.y, sprite.radius, sprite.radius);
      ctxB.drawImage(imgB, sprite.x, sprite.y, sprite.radius, sprite.radius);
      sprite.angle++;
    });
    if (!halted) {
      requestAnimationFrame(draw);
    }
  }
  var imgA = new Image();
  var imgB = new Image();
  // Start animating and recording once the first image is ready.
  imgA.onload = function() {
    draw();
    startRecording();
  };
  imgA.crossOrigin = imgB.crossOrigin = 'anonymous';
  imgA.src = "https://dl.dropboxusercontent.com/s/4e90e48s5vtmfbd/aaa.png";
  imgB.src = "https://dl.dropboxusercontent.com/s/rumlhyme6s5f8pt/ABC.png";
  return function() {
    halted = true;
  };
})();
<p>
<button id="stopRec">stop recording</button>
</p>
<canvas id="a"></canvas>
<canvas id="b"></canvas>
<video id="vid" controls></video>
这篇关于使用captureStream和mediaRecorder进行画布录制的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!