原始 JavaScript 代码(来自 MediaRecorder-examples/record-canvas-to-video.js):
Software requirements
- Firefox 45. This is a Firefox technical demo. So it might not work on your browser, if it doesn't implement what we're demoing. At the time of writing (January 2016), you need to download either Firefox Developer Edition or Firefox Nightly.
window.onload = function () {
  // Recorded chunks are accumulated here; `dataavailable` may fire more
  // than once per recording on Chromium, so we must not treat a single
  // event as the whole recording.
  var chunks = [];
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we request it once and reuse it across calls to draw()
  // for best performance (no ImageData allocation per frame).
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  // FIX: data.length is width*height*4 (RGBA). The original looped
  // data.length times while advancing 4 bytes per iteration, doing 4x
  // the needed work (the out-of-range writes were silently dropped).
  var numPixels = data.length / 4;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);

  // FIX: the original rebuilt the Blob and reassigned video.src inside
  // the `dataavailable` handler. Chromium fires that event repeatedly,
  // which aborted the in-flight play() with "The play() request was
  // interrupted by a new load request". Here we only collect chunks...
  recorder.addEventListener('dataavailable', function (e) {
    if (e.data && e.data.size > 0) {
      chunks.push(e.data);
    }
  });

  // ...and assemble + play the final Blob once recording has stopped.
  recorder.addEventListener('stop', finishCapturing);

  startCapturing();
  recorder.start();

  setTimeout(function () {
    capturing = false;
    recorder.stop();
  }, 2000);

  // Begin the animation loop that feeds the captured stream.
  function startCapturing() {
    capturing = true;
    draw();
  }

  // Concatenate all recorded chunks into one Blob and play it back.
  function finishCapturing() {
    var blob = new Blob(chunks, { 'type': 'video/webm' });
    var videoURL = URL.createObjectURL(blob);
    video.src = videoURL;
    // FIX: release the object URL once the element has loaded the data,
    // so the underlying Blob can be garbage-collected.
    video.onloadeddata = function () {
      URL.revokeObjectURL(videoURL);
    };
    // FIX: play() returns a Promise in modern browsers; don't drop a
    // rejection (e.g. autoplay policy) on the floor.
    var playPromise = video.play();
    if (playPromise && typeof playPromise.catch === 'function') {
      playPromise.catch(function (err) {
        console.error('video.play() failed:', err);
      });
    }
  }

  function draw() {
    // Schedule the next frame only while we're still capturing.
    if (capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }

  // Fill every pixel with a random grey value (white-noise frame).
  function drawWhiteNoise() {
    var offset = 0;
    for (var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);
      // The data array has pixel values in RGBA order. R, G and B get
      // the same value ('grey'); alpha is skipped — it keeps the opaque
      // value set when painting the background black at the beginning.
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }
    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }
};
在 Chrome 55 时产生错误
Uncaught (in promise) DOMException: The play() request was interrupted by a new load request.
Failed to load resource: the server responded with a status of 416 (Requested Range Not Satisfiable)
尽管在 Firefox 52 上返回预期结果。
调整后的 JavaScript:为兼容 Chromium,将 MediaRecorder 在 dataavailable 事件中产生的 Blob 依次存入一个数组,然后在 stop 事件中把这些 Blob 连接成一个完整的 Blob。
window.onload = function () {
  // Chunks delivered by `dataavailable` (may fire several times on
  // Chromium); concatenated into one Blob on `stop`.
  var blobs = [];
  var video = document.getElementById('video');
  var canvas = document.getElementById('canvas');
  var width = canvas.width;
  var height = canvas.height;
  var capturing = false;

  video.width = width;
  video.height = height;

  // We need the 2D context to individually manipulate pixel data
  var ctx = canvas.getContext('2d');

  // Start with a black background
  ctx.fillStyle = '#000';
  ctx.fillRect(0, 0, width, height);

  // Since we're continuously accessing and overwriting the pixels
  // object, we request it once and reuse it across calls to draw()
  // for best performance (no ImageData allocation per frame).
  var pixels = ctx.getImageData(0, 0, width, height);
  var data = pixels.data;
  // FIX: data.length is width*height*4 (RGBA). The original looped
  // data.length times while advancing 4 bytes per iteration, doing 4x
  // the needed work (the out-of-range writes were silently dropped).
  var numPixels = data.length / 4;

  var stream = canvas.captureStream(15);
  var recorder = new MediaRecorder(stream);
  recorder.addEventListener('dataavailable', finishCapturing);

  recorder.addEventListener('stop', function (e) {
    var videoURL = URL.createObjectURL(new Blob(blobs, { type: 'video/webm' }));
    video.src = videoURL;
    video.oncanplay = function () {
      // Run once: `canplay` can re-fire later (e.g. after a seek).
      video.oncanplay = null;
      // FIX: release the object URL so the Blob can be garbage-collected.
      URL.revokeObjectURL(videoURL);
      // FIX: play() returns a Promise; surface a rejection (such as an
      // autoplay restriction) instead of silently dropping it.
      var playPromise = video.play();
      if (playPromise && typeof playPromise.catch === 'function') {
        playPromise.catch(function (err) {
          console.error('video.play() failed:', err);
        });
      }
    };
  });

  startCapturing();
  recorder.start();

  setTimeout(function () {
    capturing = false;
    recorder.stop();
  }, 2000);

  // Begin the animation loop that feeds the captured stream.
  function startCapturing() {
    capturing = true;
    draw();
  }

  // Collect each recorded chunk; assembly happens in the `stop` handler.
  function finishCapturing(e) {
    blobs.push(e.data);
  }

  function draw() {
    // Schedule the next frame only while we're still capturing.
    if (capturing) {
      requestAnimationFrame(draw);
    }
    drawWhiteNoise();
  }

  // Fill every pixel with a random grey value (white-noise frame).
  function drawWhiteNoise() {
    var offset = 0;
    for (var i = 0; i < numPixels; i++) {
      var grey = Math.round(Math.random() * 255);
      // The data array has pixel values in RGBA order. R, G and B get
      // the same value ('grey'); alpha is skipped — it keeps the opaque
      // value set when painting the background black at the beginning.
      data[offset++] = grey;
      data[offset++] = grey;
      data[offset++] = grey;
      offset++; // skip the alpha component
    }
    // And tell the context to draw the updated pixels in the canvas
    ctx.putImageData(pixels, 0, 0);
  }
};
以与 Firefox 类似的方式呈现录制的流。
但是,经过上述调整后,在 Firefox 和 Chromium 上播放视频时,连接起来的各段 Blob 之间仍存在明显的延迟,导致播放出现可察觉的卡顿。
我们如何才能让通过 canvas.captureStream() 捕获、并用 MediaRecorder() 录制的流,在 <video> 元素中呈现与原始 Canvas 相同的视觉播放效果?
最佳答案
您在这里从主 JS 线程驱动动画,因此其他主线程 JS 事件(例如 ondataavailable
回调触发)可能会扰乱时序,足以引起注意。
尝试在 canvas.captureStream() 调用中省略帧速率参数(例如 60)。
MDN说:“如果未设置,则每次 Canvas 更改时都会捕获一个新帧;如果设置为 0,则将捕获一个单帧。”
这有望使输出更不受此类中断的影响,但代价是稍微缩短其长度。
您还可以在调用 start 方法时指定 timeslice 参数,例如 recorder.start(2000),以限制 dataavailable 事件的触发时机,从而避免中断。
关于javascript - <video> 使用 MediaRecorder() 从 <canvas> 播放录制的流,使用 canvas.captureStream() 在 firefox、chromium 上呈现不同的效果,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/39937863/