Manipulate a webcam stream as a WebGL texture?
Asked Answered
D

2

7

What's the most efficient way to acquire the webcam stream directly into WebGL as a texture?

The standard process (as documented in Three.js) is to use the video tag, then manipulate it in a canvas tag and then from canvas to WebGL.

This increases CPU overhead and Chrome spins up a worker to send image data from the camera to the GPU (e.g. 30 times per second).

Is there any way to do this more efficiently and reduce rendering/processing on the CPU?

Diverting answered 17/6, 2019 at 22:51 Comment(2)
You don't need the video to canvas part, otherwise yes, there is currently no other way to access the stream's video data than decoding through a video element. Might change in the future though.Kana
Very related: #36691465Kana
T
1

The easiest way would be to simply put the stream in the video HTML element and then transform it into a texture. That approach results in low fps. Luckily there is a library that does it quickly. I don't understand exactly how, though (it probably extracts the texture from the stream itself, or optimizes the first method).

https://p5js.org/examples/dom-video-capture.html https://p5js.org/examples/3d-shader-using-webcam.html#

You can check source code to understand how they do it. https://github.com/processing/p5.js/

When I compared these two approaches, the second one resulted in 60fps, while the first one was 20fps for my shader.

Totem answered 9/11, 2021 at 17:56 Comment(0)
L
1

The WebGL function that updates texture data, gl.texImage2D(), will accept a <video>...</video> page element directly as the pixel source for its last argument.

So, all you need to do is

  • set up your WebGL pipeline,
  • ask the user for permission to capture their webcam, and then
  • start an animation loop that repeatedly calls gl.texImage2D() to move data from the camera to the texture, before rendering your scene.

I tested this code in Mozilla Firefox 122.0.1. For the sake of brevity, I've deliberately omitted error checking, hard-coded some of the WebGL argument indices, and left out browser-compatibility code:

<!DOCTYPE html>
<html><head><script>
// Render the user's webcam feed through a WebGL fragment shader.
// The hidden <video> element decodes the camera stream, and
// gl.texImage2D() accepts it directly as a texture source each frame.
function main() {
    // Capture webcam input using the invisible `video` element below.
    // Adapted from p5js.org/examples/3d-shader-using-webcam.html
    let camera = document.getElementById("camera");

    // Ask user permission to record their camera.
    // NOTE: play() must run *after* the stream is attached, so it
    // belongs inside the promise handler — calling it synchronously
    // (before getUserMedia() resolves) races against stream setup.
    navigator.mediaDevices.getUserMedia({video: true, audio: false}).then(
        (stream) => {
            try {
                if ('srcObject' in camera) camera.srcObject = stream;
                else camera.src = window.URL.createObjectURL(stream);
            } catch (err) { camera.src = stream; }
            camera.play();
        }, console.log);

    // 512×512 canvas with a WebGL context
    var canvas = document.getElementById("maincanvas");
    var gl = canvas.getContext("webgl");
    canvas.width = canvas.height = 512;
    gl.viewport(0, 0, canvas.width, canvas.height);

    // Vertex shader: identity map (pass clip-space positions through)
    var vshader = gl.createShader(gl.VERTEX_SHADER);
    gl.shaderSource(vshader,
        "attribute vec2 p;"+
        "void main(){"+
        "    gl_Position = vec4(p,0,1);"+
        "}");
    gl.compileShader(vshader);

    // Fragment shader: sample the video texture, swizzle the colors
    var fshader = gl.createShader(gl.FRAGMENT_SHADER);
    gl.shaderSource(fshader,
        "uniform sampler2D data; "+
        "void main() {"+
        "    gl_FragColor=texture2D(data,gl_FragCoord.xy/vec2(512,512)).zxyw;"+
        "}");
    gl.compileShader(fshader);

    // Create and link the program
    var program = gl.createProgram();
    gl.attachShader(program, vshader);
    gl.attachShader(program, fshader);
    gl.linkProgram(program);
    gl.useProgram(program);

    // Vertices: a screen-filling quad made from two triangles
    gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer());
    gl.bufferData(gl.ARRAY_BUFFER,
        new Float32Array([-1,-1, 1,-1, -1,1, -1,1, 1,-1, 1,1]),
        gl.STATIC_DRAW);
    gl.enableVertexAttribArray(0);
    gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);

    // Texture that will hold the video frames. CLAMP_TO_EDGE wrapping
    // and LINEAR (non-mipmapped) filtering keep the texture "complete"
    // even though camera frames are not power-of-two sized.
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);

    // Bind texture unit 0 to the "data" sampler in the fragment shader
    gl.uniform1i(gl.getUniformLocation(program, "data"), 0);
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, texture);

    // Repeatedly pull camera data and render. Skip the upload until
    // the video has a decoded frame available — uploading an empty
    // video element raises warnings / produces a black texture.
    function animate() {
        if (camera.readyState >= camera.HAVE_CURRENT_DATA) {
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, camera);
            gl.drawArrays(gl.TRIANGLES, 0, 6);
        }
        requestAnimationFrame(animate);
    }
    animate();
}
</script></head>
<body onload="javascript:main()">
<canvas id='maincanvas' style="width:512px;height:512px;"></canvas>
<!-- `muted` + `playsinline` let play() succeed without a user gesture
     under modern autoplay policies; display:none hides the element
     while it keeps decoding frames. -->
<video id='camera' muted style="width: 512px; height: 512px; display:none;" controls="true" playsinline="" crossorigin="anonymous"></video>
</body>
</html>
Lajuanalake answered 10/2 at 18:54 Comment(0)

© 2022 - 2024 — McMap. All rights reserved.