如何在WebGL中从HTML5播放器绘制视频并将其投影到3D平面上? [英] How can I draw and project a video from an HTML5 player onto a 3D plane in WebGL?
问题描述
我正在使用Unity将我的应用程序构建为WebGL,我发现自己需要从HTML播放器中获取视频,并将其绘制在3D空间的平面上。
我知道您可以在CanvasRenderingContext2D
上调用drawImage()
并传入对视频播放器的引用,当函数运行时,当前帧将被绘制到画布上。
我所能找到的与该函数最接近的3D等价物是WebGL2RenderingContext.texImage3D()
。然而,我并不完全了解它的工作原理,当我尝试测试它时,收到了以下异常:Uncaught DOMException: The operation is insecure.
我使用的是我自己的本地视频文件,所以它不可能是CORS,但我不知道它是什么。
您可以在this GitHub repo中查看测试项目。
我发现了一个类似的问题here,但不幸的是,答案显示了如何绘制我认为是预加载的纹理。我不知道如何从视频播放器中抓取它们并传递它们,或者这是否足够快,可以每一帧都完成。
为了给出一些背景,我试图在我的Unity/WebGL应用程序中显示一个HLS实况流。我可以下载.ts
(mpeg-2传输流)视频片段,并将它们排成一个连贯的视频流,但Unity的内置视频播放器不支持这种格式。
作为解决方案,我想我可以在HTML5播放器中抓取视频(如果需要,可以使用hls.js),然后使用JavaScript将纹理注入到WebGL应用程序中,每一帧。
Unity允许您从其C#脚本内部运行JavaScript代码，因此调用时机应该不成问题，获取目标平面的世界比例/位置也不成问题。我只需要编写JavaScript函数来以某种方式绘制纹理。
这里是my current code:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebGL</title>
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
<style>
body {
background-color: aquamarine;
}
</style>
</head>
<body>
<video muted autoplay width="480" height="270">
<source src="./test.mp4" type="video/mp4" />
</video>
<br>
<canvas width="1080" height="720"></canvas>
<button onclick="takeScreenshot()">Capture</button>
<script>
//# NOTE(review): this is the question's non-working code, kept for context.
//# It never calls gl.createTexture()/gl.bindTexture(), so texImage3D has no
//# texture object to upload into, and it uses the TEXTURE_3D target for what
//# is 2D video data. The working answer below uses texImage2D on a bound
//# TEXTURE_2D instead.
function takeScreenshot() {
var video = document.querySelector("video");
var canvas = document.querySelector("canvas");
var gl = canvas.getContext("webgl2");
gl.texImage3D(
gl.TEXTURE_3D, // target (enum)
0, // level of detail
gl.RGBA, // internalFormat
1920, // width of texture
1080, // height of texture
1, // depth
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
video, // source
);
}
</script>
</body>
</html>
WebGL
推荐答案
以下是设置一个可以接收视频像素的WebGL对象的示例代码。希望对你有用。
基本上,您使用两个三角形创建一个框/矩形形状,然后将视频投影到该矩形上。
0-------1
| |
3-------2
//# two sets of... connected 3-points of a triangle
var vertexIndices = [ 0, 1, 2, 0, 2, 3, ];
下面的示例代码还创建了一些必需的GPU着色器和程序。试用一下吧。如果只想做GPU像素效果，请将效果代码写入片段着色器。
(请参阅//# example of basic colour effect
上的代码部分)。
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebGL</title>
<!--
<script src="https://unpkg.com/@ffmpeg/ffmpeg@0.10.1/dist/ffmpeg.min.js"></script>
-->
<style> body {background-color: aquamarine; } </style>
</head>
<body>
<video id="myVideo" controls muted autoplay width="480" height="270">
<source src="video.mp4" type="video/mp4" />
</video>
<br>
<button id="capture" onclick="takeScreenshot()"> Capture </button>
<br><br>
<!--
<canvas id="myCanvas" width="1080" height="720"></canvas>
-->
<canvas id="myCanvas" width="480" height="270"></canvas>
<!-- ########## Shader code ###### -->
<!-- ### Shader code here -->
<!-- Fragment shader program -->
<script id="shader-fs" type="x-shader/x-fragment">
//<!-- //## code for pixel effects goes here if needed -->
//# these two vars will access
//# vDirection comes from the vertex shader in the range [-1, 1]
varying mediump vec2 vDirection;
uniform sampler2D uSampler;
void main(void)
{
//# get current video pixel's color (no FOR-loops needed like in JS Canvas)
//# the * 0.5 + 0.5 remaps [-1, 1] clip coords to [0, 1] texture coords
gl_FragColor = texture2D(uSampler, vec2(vDirection.x * 0.5 + 0.5, vDirection.y * 0.5 + 0.5));
/*
//# example of basic colour effect
gl_FragColor.r = ( gl_FragColor.r * 1.15 );
gl_FragColor.g = ( gl_FragColor.g * 0.8 );
gl_FragColor.b = ( gl_FragColor.b * 0.45 );
*/
}
</script>
<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">
attribute mediump vec2 aVertexPosition;
varying mediump vec2 vDirection;
void main( void )
{
//# scaling the whole vec4 by 2.0 sets w = 2.0; after the perspective
//# divide the x,y come out unchanged, so the quad corners land exactly
//# on the clip-space positions supplied in aVertexPosition
gl_Position = vec4(aVertexPosition, 1.0, 1.0) * 2.0;
//# pass the raw [-1, 1] corner position to the fragment shader
vDirection = aVertexPosition;
}
</script>
<!-- ### END Shader code... -->
<script>
//# WebGL setup
var video = document.getElementById('myVideo');
const glcanvas = document.getElementById('myCanvas');
//# "experimental-webgl" is a fallback name used by some older browsers
const gl = ( ( glcanvas.getContext("webgl") ) || ( glcanvas.getContext("experimental-webgl") ) );
//# check if WebGL is available..
if (gl && gl instanceof WebGLRenderingContext) { console.log( "WebGL is available"); }
else { console.log( "WebGL is NOT available" ); } //# use regular JS canvas functions if this happens...
//# create and attach the shader program to the webGL context
//# (filled in by attachShader below; read later by takeScreenshot)
var attributes, uniforms, program;
//# Compiles the two shader <script> tags named in params, links them into a
//# GPU program, activates it, and caches the attribute/uniform locations in
//# the shared `attributes` / `uniforms` / `program` variables declared above.
//# params: { fragmentShaderName, vertexShaderName, attributes: [...], uniforms: [...] }
function attachShader( params )
{
//# declared with var — the originals were accidental implicit globals
var fragmentShader = getShaderByName(params.fragmentShaderName);
var vertexShader = getShaderByName(params.vertexShaderName);
program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS))
{ alert("Unable to initialize the shader program: " + gl.getProgramInfoLog(program)); }
gl.useProgram(program);
//# cache attribute locations and enable each vertex attribute array
attributes = {};
for (var i = 0; i < params.attributes.length; i++)
{
var attributeName = params.attributes[i];
attributes[attributeName] = gl.getAttribLocation(program, attributeName);
gl.enableVertexAttribArray(attributes[attributeName]);
}
//# cache uniform locations.
//# BUG FIX: the original also called gl.enableVertexAttribArray(attributes[uniformName])
//# in this loop — that indexes the attributes map with a uniform name, passing
//# undefined; uniforms are not vertex attributes and must not be enabled.
uniforms = {};
for (i = 0; i < params.uniforms.length; i++)
{
var uniformName = params.uniforms[i];
uniforms[uniformName] = gl.getUniformLocation(program, uniformName);
}
}
//# Looks up the <script> tag with the given id, compiles its text content as
//# a fragment or vertex shader (chosen by the tag's type attribute), and
//# returns the compiled WebGLShader — or null if compilation failed.
function getShaderByName( id )
{
var tag = document.getElementById(id);
//# concatenate every direct text node inside the script tag
var source = "";
for (var node = tag.firstChild; node; node = node.nextSibling)
{
if (node.nodeType === 3) { source += node.textContent; }
}
//# anything that is not explicitly a fragment shader compiles as a vertex shader
var shader = (tag.type === "x-shader/x-fragment")
? gl.createShader(gl.FRAGMENT_SHADER)
: gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS))
{
alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
//# attach shader
attachShader({
fragmentShaderName: 'shader-fs',
vertexShaderName: 'shader-vs',
attributes: ['aVertexPosition'],
uniforms: ['someVal', 'uSampler'],
});
// some webGL initialization
gl.clearColor(0.0, 0.0, 0.0, 0.0);
gl.clearDepth(1.0);
gl.disable(gl.DEPTH_TEST);
//# quad corner positions in clip space; triangles (0,1,2) and (0,2,3)
//# share corners 0 and 2 to form the rectangle.
//# (these were implicit globals — top-level var still attaches them to
//# window, so the second <script> block can keep reading them)
var positionsBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
var positions = [
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0,
-1.0, 1.0,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
//# (removed unused vertexColors / cBuffer — they were never referenced)
var verticesIndexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
var vertexIndices = [ 0, 1, 2, 0, 2, 3, ];
gl.bufferData(
gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(vertexIndices), gl.STATIC_DRAW
);
//# texture that updateTexture refreshes with each video frame
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
//# must be LINEAR to avoid subtle pixelation (double-check this... test other options like NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
//# CLAMP_TO_EDGE + LINEAR work for non-power-of-two video dimensions
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
// update the texture from the video
var updateTexture = function()
{
gl.bindTexture(gl.TEXTURE_2D, texture);
//# video pixels arrive top-to-bottom; GL textures are bottom-up
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
//# next line fails in Safari if input video is NOT from same domain/server as this html code
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB,
gl.UNSIGNED_BYTE, video);
gl.bindTexture(gl.TEXTURE_2D, null);
};
</script>
<script>
//# Vars for video frame grabbing when system/browser provides a new frame
//# (vendor-prefixed fallbacks for older browsers)
var requestAnimationFrame = (window.requestAnimationFrame || window.mozRequestAnimationFrame ||
window.webkitRequestAnimationFrame || window.msRequestAnimationFrame);
var cancelAnimationFrame = (window.cancelAnimationFrame || window.mozCancelAnimationFrame);
///////////////////////////////////////////////
//# Draws the current video frame onto the WebGL quad, then schedules itself
//# on the next animation frame so the canvas continuously tracks the video.
//# (GL calls below are strictly order-dependent: program, then attribute
//# pointer, then texture binding, then indexed draw.)
function takeScreenshot( )
{
//# video is ready (can display pixels)
//# readyState >= 3 means HAVE_FUTURE_DATA or better
if( video.readyState >= 3 )
{
updateTexture(); //# update pixels with current video frame's pixels...
gl.useProgram(program); //# apply our program
//# point aVertexPosition at the quad's corner buffer (2 floats per vertex)
gl.bindBuffer(gl.ARRAY_BUFFER, positionsBuffer);
gl.vertexAttribPointer(attributes['aVertexPosition'], 2, gl.FLOAT, false, 0, 0);
//# Specify the texture to map onto the faces.
//# bind the video texture to unit 0 and expose it to the shader as uSampler
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(uniforms['uSampler'], 0);
//# Draw GPU
//# draw the two indexed triangles that form the rectangle
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, verticesIndexBuffer);
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0);
}
//# re-capture the next frame... basically the function loops itself
//# consider adding event listener for video pause to set value as... cancelAnimationFrame( takeScreenshot );
requestAnimationFrame( takeScreenshot );
}
//////////////////////////////////////
//# NOTE(review): the question's original broken approach, kept only for
//# comparison — it is never called. It references an undeclared `canvas`
//# variable (the canvas id here is "myCanvas"), so it would throw a
//# ReferenceError, and it uploads via texImage3D without ever creating or
//# binding a texture.
function takeScreenshot_old()
{
var gl = canvas.getContext("webgl2");
gl.texImage3D(
gl.TEXTURE_3D, // target (enum)
0, // level of detail
gl.RGBA, // internalFormat
1920, // width of texture
1080, // height of texture
1, // depth
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
video, // source
);
}
</script>
</body>
</html>
这篇关于如何在WebGL中从HTML5播放器绘制视频并将其投影到3D平面上?的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!