我做了一个简单的设置,获取网络摄像头/手机摄像头流并将其传递,在 html 2d Canvas 上绘图。
但是我一直无法弄清楚如何以几秒钟的延迟显示流。有点像延迟镜。
我尝试使用 ctx.globalAlpha = 0.005;
但这给了我重影效果,而不是“延迟”流。
知道如何实现这一目标吗?
- 下面的代码片段在此处无法运行(大概是出于安全限制),不过这里有一个 CodePen 示例:
https://codepen.io/farisk/pen/LvmGGQ
// Canvas that will display the camera feed.
var width = 0;
var height = 0;
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
document.body.appendChild(canvas);

// Off-DOM <video> element used as the drawing source for the canvas.
var video = document.createElement('video');
var track;
video.setAttribute('autoplay', true);
window.vid = video;
/**
 * Requests the user's camera and pipes the resulting MediaStream into
 * the <video> element. The first track is kept in `track` so the
 * canvas click handler can stop it later.
 *
 * Fixes: uses .catch() instead of the two-argument .then(success, error)
 * form (which misses errors thrown inside the success handler), and
 * drops the redundant `new MediaStream(stream.getVideoTracks())`
 * wrapper plus the unused `videoTracks` local.
 */
function getWebcam() {
  navigator.mediaDevices
    .getUserMedia({ video: true })
    .then(function (stream) {
      video.srcObject = stream;
      video.play();
      track = stream.getTracks()[0];
    })
    .catch(function (e) {
      console.error('Rejected!', e);
    });
}
getWebcam();
// Animation-loop state. centerX/centerY are filled in once the video's
// metadata (and therefore its intrinsic size) is known.
var rotation = 0;
var loopFrame;
var centerX;
var centerY;
var twoPI = Math.PI * 2;
/**
 * Draws the current video frame onto the canvas once per display
 * frame. NOTE: drawing with globalAlpha < 1 (the commented-out
 * experiment) only produces a ghosting/trail effect, not a time delay.
 *
 * Fix: removed the stray ctx.restore() — there was no matching
 * ctx.save(), so the call was at best a no-op.
 */
function loop() {
  loopFrame = requestAnimationFrame(loop);
  ctx.drawImage(video, 0, 0, width, height);
}
// Kicks off the per-frame drawing loop and remembers the handle so it
// could be cancelled with cancelAnimationFrame if needed.
function startLoop() {
  loopFrame = requestAnimationFrame(loop);
}
// Once the video reports its intrinsic size, match the canvas to it,
// cache the dimensions/center point, and start drawing.
video.addEventListener('loadedmetadata', function () {
  canvas.width = video.videoWidth;
  canvas.height = video.videoHeight;
  width = canvas.width;
  height = canvas.height;
  centerX = width / 2;
  centerY = height / 2;
  startLoop();
});
// Clicking the canvas toggles the camera: stop the active track if one
// exists, otherwise request the webcam again.
canvas.addEventListener('click', function () {
  if (!track) {
    getWebcam();
    return;
  }
  if (track.stop) {
    track.stop();
  }
  track = null;
});
/* Scale both media elements down to fit their container while
   preserving aspect ratio. */
video,
canvas {
max-width: 100%;
height: auto;
}
- (同样,下面的答案代码片段在此页面可能因安全限制而无法直接运行。)
最佳答案
您可以缓冲流并播放缓冲的内容。
要对 MediaStream 实现此目的,您可以结合使用 MediaRecorder API 和 MediaSource API。
基本思路是:一边录制流,一边在每生成一个新的数据块(chunk)时将其追加缓冲到 MediaSource 中。然后只需让视频保持暂停、等待我们想要的延迟时间,再开始播放即可。
(async () => {
  // How long (ms) the delayed playback should lag behind realtime.
  const delay = 3000;
  const mimeType = `video/webm; codecs="vp8"`;

  const stream = await getStream();
  document.getElementById("realtime").srcObject = stream;

  // MediaSource acts as the buffer between the recorder (producer)
  // and the delayed <video> element (consumer).
  const mediaSource = new MediaSource();
  const delayed = document.getElementById("delayed");
  delayed.src = URL.createObjectURL(mediaSource);
  await new Promise((res) =>
    mediaSource.addEventListener("sourceopen", res, { once: true })
  );

  const sourceBuffer = mediaSource.addSourceBuffer(mimeType);
  const recorder = new MediaRecorder(stream, { mimeType });

  // Fix: SourceBuffer.appendBuffer() throws an InvalidStateError if
  // called while a previous append is still processing, so queue the
  // chunks and flush them one at a time on "updateend". (Also removes
  // the unused `chunks` array from the original.)
  const queue = [];
  const flush = () => {
    if (!sourceBuffer.updating && queue.length) {
      sourceBuffer.appendBuffer(queue.shift());
    }
  };
  sourceBuffer.addEventListener("updateend", flush);

  recorder.ondataavailable = async ({ data }) => {
    if (mediaSource.readyState !== "open" || !data.size) {
      return;
    }
    queue.push(await data.arrayBuffer());
    flush();
  };

  // Keep the delayed element paused while the buffer fills, then start
  // playback `delay` ms behind the live stream.
  delayed.pause();
  recorder.start(50);
  setTimeout(() => delayed.play(), delay);
})();
/**
 * StackSnippet only: getUserMedia is blocked inside snippets, so fake
 * a camera by streaming a <canvas> animation (a live clock) instead.
 */
function getStream() {
  const canvas = document.createElement("canvas");
  const ctx = canvas.getContext("2d");
  ctx.font = "30px sans-serif";
  ctx.textAlign = "center";
  ctx.textBaseline = "middle";

  const drawFrame = () => {
    // White background…
    ctx.fillStyle = "white";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    // …with the current HH:MM:SS centered on top.
    ctx.fillStyle = "black";
    const time = new Date().toTimeString().split(" ")[0];
    ctx.fillText(time, canvas.width / 2, canvas.height / 2);
    requestAnimationFrame(drawFrame);
  };
  drawFrame();

  return canvas.captureStream();
}
<!-- Side-by-side comparison: live camera feed vs. buffered (delayed) feed. -->
Realtime:<br>
<video id="realtime" autoplay muted controls></video><br> Delayed:<br>
<video id="delayed" autoplay muted controls></video>
作者还提供了一个 fiddle 示例,因为 StackSnippets 对 getUserMedia(gUM)不太友好。
关于javascript - 显示延迟的网络摄像头/摄像头流 - webrtc,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/55756462/