And as mrdoob mentioned, video.play() should be explicit on mobile devices through user initiated events such as click. Having done this, I have been able to play the file. However I am only able to hear the audio part of the video. The screen is still blank. Moreover, I see the following exception on the dev console.
Uncaught SecurityError: Failed to execute 'texImage2D' on 'WebGLRenderingContext': The video element contains cross-origin data, and may not be loaded. three.min.js:507
I am not loading any image through my code as you can see in the code below and the error seems to be thrown in three.min.js file. (The code below is a slightly modified version of http://threejs.org/examples/#webgl_materials_video to make it run on mobile device)
Please note that this happens only on mobile devices. The example works flawlessly in desktop browsers.
Your help is much appreciated!
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - materials - video</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
/* Full-viewport black canvas page; duplicate text-align removed. */
body {
background-color: #000;
color: #fff;
margin: 0px;
overflow: hidden;
font-family: Monospace;
font-size: 13px;
text-align: center;
font-weight: bold;
}
a {
color: #0078ff;
}
/* Overlay caption pinned to the top of the viewport, above the canvas. */
#info {
color: #fff;
position: absolute;
top: 0px;
width: 100%;
padding: 5px;
z-index: 100;
}
</style>
</head>
<body>
<div id="info">
<a href="http://threejs.org" target="_blank">three.js</a> - webgl video demo. playing <a href="http://durian.blender.org/" target="_blank">sintel</a> trailer
</div>
<script src="js/three.min.js"></script>
<script src="js/shaders/ConvolutionShader.js"></script>
<script src="js/shaders/CopyShader.js"></script>
<script src="js/postprocessing/EffectComposer.js"></script>
<script src="js/postprocessing/RenderPass.js"></script>
<script src="js/postprocessing/MaskPass.js"></script>
<script src="js/postprocessing/BloomPass.js"></script>
<script src="js/postprocessing/ShaderPass.js"></script>
<script src="js/Detector.js"></script>
<!-- crossorigin="anonymous" must be present BEFORE the sources load, otherwise
     WebGL refuses to upload frames into a texture and throws
     "SecurityError: Failed to execute 'texImage2D'" (the error reported above).
     NOTE: the server hosting the video must also send CORS headers
     (Access-Control-Allow-Origin) for this to take effect.
     playsinline / webkit-playsinline let mobile browsers decode frames inline
     (so they reach the WebGL texture) instead of a native fullscreen player. -->
<video id="video" crossorigin="anonymous" playsinline webkit-playsinline autoplay loop style="display:none">
<source src="textures/sintel.mp4" type='video/mp4; codecs="avc1.42E01E, mp4a.40.2"'>
<source src="textures/sintel.ogv" type='video/ogg; codecs="theora, vorbis"'>
</video>
<script>
// Show a fallback message and bail if WebGL is unavailable.
if ( ! Detector.webgl ) Detector.addGetWebGLMessage();
// Shared scene state: assigned in init(), read by the handlers and render().
var container;
var camera, scene, renderer, element;
var video, texture, material, mesh;
var composer;
// Pointer offset from the window center — written by onDocumentMouseMove,
// consumed by render() to ease the camera toward the pointer.
var mouseX = 0;
var mouseY = 0;
var windowHalfX = window.innerWidth / 2;
var windowHalfY = window.innerHeight / 2;
// The video is split across an xgrid x ygrid field of cubes; each cube gets
// its own material whose UVs map to one cell of the shared video texture.
var cube_count,
meshes = [],
materials = [],
xgrid = 20,
ygrid = 10;
init();
animate();
// Builds the renderer, camera and lit scene, wraps the <video> element in a
// texture, and creates the xgrid x ygrid field of cubes that each display one
// tile of the video. Wires up click (play/fullscreen), mousemove and resize.
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
camera = new THREE.PerspectiveCamera( 40, window.innerWidth / window.innerHeight, 1, 10000 );
camera.position.z = 500;
scene = new THREE.Scene();
var light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 0.5, 1, 1 ).normalize();
scene.add( light );
renderer = new THREE.WebGLRenderer( { antialias: false } );
renderer.setSize( window.innerWidth, window.innerHeight );
// A click on the canvas starts playback and requests fullscreen — mobile
// browsers only allow video.play() inside a user-initiated event.
element=renderer.domElement;
element.addEventListener('click', fullscreen, false);
container.appendChild( renderer.domElement );
// Texture backed by the <video> element. Mipmaps are disabled and linear
// filtering forced — video frames are typically non-power-of-two.
video = document.getElementById( 'video' );
texture = new THREE.Texture( video );
texture.minFilter = THREE.LinearFilter;
texture.magFilter = THREE.LinearFilter;
texture.format = THREE.RGBFormat;
texture.generateMipmaps = false;
//
var i, j, ux, uy, ox, oy,
geometry,
xsize, ysize;
// Per-cell UV scale, and cube dimensions derived from a 480x204 video frame.
ux = 1 / xgrid;
uy = 1 / ygrid;
xsize = 480 / xgrid;
ysize = 204 / ygrid;
var parameters = { color: 0xffffff, map: texture },
material_base = new THREE.MeshLambertMaterial( parameters );
// NOTE(review): scene.__lights is a private three.js field and initMaterial
// is version-sensitive — confirm both exist in the bundled three.min.js.
renderer.initMaterial( material_base, scene.__lights, scene.fog );
cube_count = 0;
for ( i = 0; i < xgrid; i ++ )
for ( j = 0; j < ygrid; j ++ ) {
ox = i;
oy = j;
geometry = new THREE.BoxGeometry( xsize, ysize, xsize );
// Remap this cube's UVs so it samples only its (i, j) cell of the video.
change_uvs( geometry, ux, uy, ox, oy );
materials[ cube_count ] = new THREE.MeshLambertMaterial( parameters );
material = materials[ cube_count ];
// Base hue/saturation vary across the grid; render() animates the hue.
material.hue = i/xgrid;
material.saturation = 1 - j/ygrid;
material.color.setHSL( material.hue, material.saturation, 0.5 );
mesh = new THREE.Mesh( geometry, material );
mesh.position.x = ( i - xgrid/2 ) * xsize;
mesh.position.y = ( j - ygrid/2 ) * ysize;
mesh.position.z = 0;
mesh.scale.x = mesh.scale.y = mesh.scale.z = 1;
scene.add( mesh );
// Small random drift, applied per-frame in render().
mesh.dx = 0.001 * ( 0.5 - Math.random() );
mesh.dy = 0.001 * ( 0.5 - Math.random() );
meshes[ cube_count ] = mesh;
cube_count += 1;
}
// The composer drives all drawing, so disable the renderer's auto-clear.
renderer.autoClear = false;
document.addEventListener( 'mousemove', onDocumentMouseMove, false );
// postprocessing chain: scene render -> bloom -> copy to screen
var renderModel = new THREE.RenderPass( scene, camera );
var effectBloom = new THREE.BloomPass( 1.3 );
var effectCopy = new THREE.ShaderPass( THREE.CopyShader );
effectCopy.renderToScreen = true;
composer = new THREE.EffectComposer( renderer );
composer.addPass( renderModel );
composer.addPass( effectBloom );
composer.addPass( effectCopy );
//
window.addEventListener( 'resize', onWindowResize, false );
}
// Click handler: starts video playback (mobile browsers require play() to be
// called from a user gesture) and puts the container into fullscreen using
// whichever Fullscreen API variant the browser exposes.
function fullscreen() {
  video.play();
  console.log(video);
  // First truthy wins: standard API first, then vendor-prefixed fallbacks —
  // same precedence as an if/else-if chain over the four names.
  var requestMethod = container.requestFullscreen ||
      container.msRequestFullscreen ||
      container.mozRequestFullScreen ||
      container.webkitRequestFullscreen;
  if ( requestMethod ) {
    requestMethod.call( container );
  }
}
// Resize handler: refreshes the cached window-center (used by the mouse
// handler), fixes the camera aspect, and resizes renderer + composer.
function onWindowResize() {
  var width = window.innerWidth;
  var height = window.innerHeight;

  windowHalfX = width / 2;
  windowHalfY = height / 2;

  camera.aspect = width / height;
  camera.updateProjectionMatrix();

  renderer.setSize( width, height );
  composer.reset();
}
// Rescales and offsets every UV of the geometry's first UV set in place, so
// the mesh samples only one (offsetx, offsety) cell of a grid whose cells are
// unitx x unity in texture space: uv' = (uv + offset) * unit.
function change_uvs( geometry, unitx, unity, offsetx, offsety ) {
  var allFaceUvs = geometry.faceVertexUvs[ 0 ];
  allFaceUvs.forEach( function ( faceUvs ) {
    faceUvs.forEach( function ( uv ) {
      uv.x = ( uv.x + offsetx ) * unitx;
      uv.y = ( uv.y + offsety ) * unity;
    } );
  } );
}
// Tracks the pointer's offset from the window center; the vertical component
// is damped (x0.3) so the camera tilts less than it pans.
function onDocumentMouseMove( event ) {
  var offsetX = event.clientX - windowHalfX;
  var offsetY = event.clientY - windowHalfY;
  mouseX = offsetX;
  mouseY = offsetY * 0.3;
}
//
// Main loop: schedules the next frame first, then draws the current one.
function animate() {
requestAnimationFrame( animate );
render();
}
// h: scratch hue value; counter: frame counter driving the periodic motion.
var h, counter = 1;

// Draws one frame: eases the camera toward the pointer, marks the video
// texture for re-upload when frame data is available, animates cube hues and
// drift, and renders through the post-processing composer.
function render() {

  var time = Date.now() * 0.00005;

  // Ease the camera toward the mouse offset, then aim at the scene center.
  camera.position.x += ( mouseX - camera.position.x ) * 0.05;
  camera.position.y += ( - mouseY - camera.position.y ) * 0.05;
  camera.lookAt( scene.position );

  // Upload a new video frame only when the element actually has data.
  if ( video.readyState === video.HAVE_ENOUGH_DATA ) {
    if ( texture ) texture.needsUpdate = true;
  }

  // FIX: `i` was an implicit global (a ReferenceError in strict mode and a
  // global-scope leak otherwise); declare it locally.
  var i;

  // Continuously rotate each cube's hue over time.
  for ( i = 0; i < cube_count; i ++ ) {
    material = materials[ i ];
    h = ( 360 * ( material.hue + time ) % 360 ) / 360;
    material.color.setHSL( h, material.saturation, 0.5 );
  }

  // For 800 of every 1000 frames, let the cubes drift and tumble...
  if ( counter % 1000 > 200 ) {
    for ( i = 0; i < cube_count; i ++ ) {
      mesh = meshes[ i ];
      mesh.rotation.x += 10 * mesh.dx;
      mesh.rotation.y += 10 * mesh.dy;
      mesh.position.x += 200 * mesh.dx;
      mesh.position.y += 200 * mesh.dy;
      mesh.position.z += 400 * mesh.dx;
    }
  }

  // ...and every 1000th frame, reverse the drift direction.
  if ( counter % 1000 === 0 ) {
    for ( i = 0; i < cube_count; i ++ ) {
      mesh = meshes[ i ];
      mesh.dx *= -1;
      mesh.dy *= -1;
    }
  }

  counter ++;

  renderer.clear();
  composer.render();
}
</script>
</body>
</html>
But setting the crossorigin attribute alone won't do the trick: it has to be set before calling .load() and .play(). I had it set after .load() and it did not work. So do it like this:
video.setAttribute('crossorigin', 'anonymous');
video.src = "video/test.m4v";
video.src = "video/test.mp4";
video.load(); // must call after setting/changing source
video.play();
This solution is for videos that fail to play on mobile devices specifically due to the security error shown above.
Just to be clear: the crossorigin setting tells the browser to ask the server for permission to use the video. The server still has to grant that permission via CORS headers. If the server doesn't grant permission, setting the crossorigin attribute won't let you use the video.