VR Video in the Browser
Raúl Jiménez
A bit of self-promotion
Toptal Partner
Angular GDE
videogular
Google Partner
What is VR video?
- A 360 equirectangular video file
- A compatible video player
- Compatible hardware (the headset)
- Selection hardware (pointers)
VR stands for Virtual Reality
360 video
Video players
- YouTube (native app only)
- eleVR (WebVR and emulation)
- Videogular2 (emulation)
- Video.js (emulation)
Compatible hardware
WebVR vs. emulation
- Native web API for VR devices
- Support for Windows and Android/iOS devices
- Experimental Chromium builds, Firefox Nightly and the Samsung Gear VR browser
- Emulation through WebGL
- Load the video and draw it onto a canvas (see the sketch after this list)
- Poor performance on mobile
- Very good performance on desktop PCs
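How that split looks in practice, as a minimal sketch: feature-detect the native WebVR 1.0 API and, if it is missing, fall back to painting the video onto a canvas on every frame. The element ids ('pano' and 'out') are hypothetical placeholders, not part of the demos shown later.
// Minimal sketch, assuming a <video id="pano"> and a <canvas id="out"> exist in the page.
if ( navigator.getVRDisplays ) {
// WebVR path: the native API reports the connected headsets.
navigator.getVRDisplays().then( function ( displays ) {
if ( displays.length > 0 ) {
console.log( 'VR display found:', displays[ 0 ].displayName );
// hand the display to a WebVR-aware renderer (see the three.js WebVR example below)
}
} );
} else {
// Emulation path: copy each video frame onto a canvas and reproject it with WebGL or 2D.
var video = document.getElementById( 'pano' );
var canvas = document.getElementById( 'out' );
var ctx = canvas.getContext( '2d' );
( function paint() {
requestAnimationFrame( paint );
if ( video.readyState >= video.HAVE_CURRENT_DATA ) {
ctx.drawImage( video, 0, 0, canvas.width, canvas.height );
}
} )();
}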
Emulation
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - equirectangular video panorama</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
background-color: #000000;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 0px; width: 100%;
color: #ffffff;
padding: 5px;
font-family:Monospace;
font-size:13px;
font-weight: bold;
text-align:center;
}
a {
color: #ffffff;
}
</style>
</head>
<body>
<div id="container"></div>
<script src="../build/three.js"></script>
<script>
var camera, scene, renderer;
var isUserInteracting = false,
onPointerDownPointerX = 0, onPointerDownPointerY = 0,
onPointerDownLon = 0, onPointerDownLat = 0,
lon = 0, lat = 0,
phi = 0, theta = 0,
distance = 500;
init();
animate();
function init() {
var container, mesh;
container = document.getElementById( 'container' );
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 1100 );
camera.target = new THREE.Vector3( 0, 0, 0 );
scene = new THREE.Scene();
var geometry = new THREE.SphereBufferGeometry( 500, 60, 40 );
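// invert the sphere so its faces point inward and the video texture is visible from inside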
geometry.scale( - 1, 1, 1 );
var video = document.createElement( 'video' );
video.width = 640;
video.height = 360;
video.autoplay = true;
video.loop = true;
video.src = "textures/pano.webm";
var texture = new THREE.VideoTexture( video );
texture.minFilter = THREE.LinearFilter;
texture.format = THREE.RGBFormat;
var material = new THREE.MeshBasicMaterial( { map : texture } );
mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
document.addEventListener( 'mousedown', onDocumentMouseDown, false );
document.addEventListener( 'mousemove', onDocumentMouseMove, false );
document.addEventListener( 'mouseup', onDocumentMouseUp, false );
document.addEventListener( 'mousewheel', onDocumentMouseWheel, false );
document.addEventListener( 'MozMousePixelScroll', onDocumentMouseWheel, false);
//
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function onDocumentMouseDown( event ) {
event.preventDefault();
isUserInteracting = true;
onPointerDownPointerX = event.clientX;
onPointerDownPointerY = event.clientY;
onPointerDownLon = lon;
onPointerDownLat = lat;
}
function onDocumentMouseMove( event ) {
if ( isUserInteracting === true ) {
lon = ( onPointerDownPointerX - event.clientX ) * 0.1 + onPointerDownLon;
lat = ( event.clientY - onPointerDownPointerY ) * 0.1 + onPointerDownLat;
}
}
function onDocumentMouseUp( event ) {
isUserInteracting = false;
}
function onDocumentMouseWheel( event ) {
// WebKit
if ( event.wheelDeltaY ) {
distance -= event.wheelDeltaY * 0.05;
// Opera / Explorer 9
} else if ( event.wheelDelta ) {
distance -= event.wheelDelta * 0.05;
// Firefox
} else if ( event.detail ) {
distance += event.detail * 1.0;
}
}
function animate() {
requestAnimationFrame( animate );
update();
}
function update() {
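// clamp latitude, convert lat/lon to spherical coordinates and orbit the camera around the target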
lat = Math.max( - 85, Math.min( 85, lat ) );
phi = THREE.Math.degToRad( 90 - lat );
theta = THREE.Math.degToRad( lon );
camera.position.x = distance * Math.sin( phi ) * Math.cos( theta );
camera.position.y = distance * Math.cos( phi );
camera.position.z = distance * Math.sin( phi ) * Math.sin( theta );
camera.lookAt( camera.target );
/*
// distortion
camera.position.copy( camera.target ).negate();
*/
renderer.render( scene, camera );
}
</script>
</body>
</html>
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webvr - video</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
<style>
body {
font-family: Monospace;
background-color: #f0f0f0;
margin: 0px;
overflow: hidden;
}
#info {
position: absolute;
top: 10px;
width: 100%;
color: #fff;
text-align: center;
}
a {
color: #ff0
}
</style>
</head>
<body>
<div id="container"></div>
<div id="info">
<a href="http://threejs.org" target="_blank">three.js</a> webgl - vr video<br />
stereoscopic panoramic render by <a href="http://pedrofe.com/rendering-for-oculus-rift-with-arnold/" target="_blank">pedrofe</a>. scene from <a href="http://www.meryproject.com/" target="_blank">mery project</a>.
</div>
<script src="../build/three.js"></script>
<script src="js/WebVR.js"></script>
<script src="js/effects/VREffect.js"></script>
<script src="js/controls/VRControls.js"></script>
<script>
if ( WEBVR.isLatestAvailable() === false ) {
document.body.appendChild( WEBVR.getMessage() );
}
//
var camera, scene, renderer;
var video, texture;
var controls, effect;
init();
animate();
function init() {
var container = document.getElementById( 'container' );
container.addEventListener( 'click', function () {
video.play();
} );
camera = new THREE.PerspectiveCamera( 75, window.innerWidth / window.innerHeight, 1, 2000 );
camera.layers.enable( 1 ); // render left view when no stereo available
// video
video = document.createElement( 'video' );
video.loop = true;
video.src = 'textures/MaryOculus.webm';
video.play();
texture = new THREE.VideoTexture( video );
texture.minFilter = THREE.NearestFilter;
texture.magFilter = THREE.NearestFilter;
texture.format = THREE.RGBFormat;
texture.generateMipmaps = false;
scene = new THREE.Scene();
// left
var geometry = new THREE.SphereGeometry( 500, 60, 40 );
geometry.scale( - 1, 1, 1 );
var uvs = geometry.faceVertexUvs[ 0 ];
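// map the left half of the side-by-side stereo frame (u: 0 to 0.5) onto this sphere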
for ( var i = 0; i < uvs.length; i ++ ) {
for ( var j = 0; j < 3; j ++ ) {
uvs[ i ][ j ].x *= 0.5;
}
}
var material = new THREE.MeshBasicMaterial( { map: texture } );
var mesh = new THREE.Mesh( geometry, material );
mesh.rotation.y = - Math.PI / 2;
mesh.layers.set( 1 ); // display in left eye only
scene.add( mesh );
// right
var geometry = new THREE.SphereGeometry( 500, 60, 40 );
geometry.scale( - 1, 1, 1 );
var uvs = geometry.faceVertexUvs[ 0 ];
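// map the right half of the side-by-side stereo frame (u: 0.5 to 1) onto this sphere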
for ( var i = 0; i < uvs.length; i ++ ) {
for ( var j = 0; j < 3; j ++ ) {
uvs[ i ][ j ].x *= 0.5;
uvs[ i ][ j ].x += 0.5;
}
}
var material = new THREE.MeshBasicMaterial( { map: texture } );
var mesh = new THREE.Mesh( geometry, material );
mesh.rotation.y = - Math.PI / 2;
mesh.layers.set( 2 ); // display in right eye only
scene.add( mesh );
//
renderer = new THREE.WebGLRenderer();
renderer.setClearColor( 0x101010 );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( window.innerWidth, window.innerHeight );
container.appendChild( renderer.domElement );
//
controls = new THREE.VRControls( camera );
effect = new THREE.VREffect( renderer );
effect.scale = 0; // video doesn't need eye separation
effect.setSize( window.innerWidth, window.innerHeight );
if ( WEBVR.isAvailable() === true ) {
document.body.appendChild( WEBVR.getButton( effect ) );
}
//
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
effect.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
}
function render() {
controls.update();
effect.render( scene, camera );
}
</script>
</body>
</html>
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>360 Video</title>
<meta name="description" content="360 Video — A-Frame">
<script src="../../../dist/aframe.js"></script>
</head>
<body>
<a-scene>
<a-assets>
<video id="video" src="https://ucarecdn.com/bcece0a8-86ce-460e-856b-40dac4875f15/"
autoplay loop></video>
</a-assets>
<a-videosphere src="#video" rotation="0 180 0"></a-videosphere>
</a-scene>
</body>
</html>
<vg-player>
<vg-controls [autohide]="true" [autohideTime]="1.5">
<vg-play-pause></vg-play-pause>
<vg-time-display property="current" format="mm:ss"></vg-time-display>
<vg-scrub-bar>
<vg-scrub-bar-current-time></vg-scrub-bar-current-time>
<vg-scrub-bar-buffering-time></vg-scrub-bar-buffering-time>
</vg-scrub-bar>
<vg-time-display property="left" format="mm:ss"></vg-time-display>
<vg-mute></vg-mute>
<vg-fullscreen></vg-fullscreen>
</vg-controls>
<video src="video/pano.webm" #media vg-media id="360Video" preload="auto" loop></video>
<vg-360 [media]="media" [vr]="true" [hotSpots]="hotSpots" [pointer]="showPointer" (onEnterHotSpot)="onEnterHotSpot($event)" (onLeaveHotSpot)="onLeaveHotSpot($event)"></vg-360>
</vg-player>
<div class="info-dialog" [class.active]="isActive">
<div class="info-back"></div>
<span class="info-message">You can create interactivity on your hot spot areas! Check the console log!</span>
</div>
<div class="clone info-dialog" [class.active]="isActive">
<div class="info-back"></div>
<span class="info-message">You can create interactivity on your hot spot areas! Check the console log!</span>
</div>
import {Component, OnInit, ElementRef} from "angular2/core";
import {VgPlayer, VgMedia, Vg360, IHotSpot} from "videogular2/core";
import { VgControls, VgTimeDisplay, VgPlayPause, VgPlaybackButton, VgScrubBar, VgScrubBarCurrentTime, VgScrubBarBufferingTime, VgMute, VgFullscreen } from "videogular2/controls";
import {VgOverlayPlay} from "videogular2/overlay-play";
@Component({
selector: 'video-vr-player',
templateUrl: 'src/video-vr-player.html',
directives: [VgPlayer, VgMedia, VgOverlayPlay, VgControls, VgTimeDisplay, VgPlayPause, VgPlaybackButton, VgScrubBar, VgScrubBarCurrentTime, VgScrubBarBufferingTime, VgMute, VgFullscreen, Vg360]
})
export class VideoVrPlayer implements OnInit {
elem:any;
dialog:any;
dialogClone:any;
hotSpots:Array<IHotSpot>;
showPointer:boolean = true;
isActive:boolean = false;
constructor(ref:ElementRef) {
this.elem = ref.nativeElement;
}
ngOnInit() {
this.hotSpots = [];
this.dialog = this.elem.querySelector('.info-dialog');
this.dialogClone = this.elem.querySelector('.clone.info-dialog');
var hs:IHotSpot = <IHotSpot>{};
hs.element = this.dialog;
hs.elementClone = this.dialogClone;
hs.position = {
x: -200,
y: 0,
z: 150
};
hs.rotation = {
x: 0,
y: 47,
z: 0
};
this.hotSpots.push(hs);
}
onEnterHotSpot(object:IHotSpot) {
console.log('enter', object);
this.isActive = true;
}
onLeaveHotSpot(object:IHotSpot) {
console.log('leave', object);
this.isActive = false;
}
}
The two blocks above: the Videogular2 HTML template and its Angular2 component.
Thank you!
Questions?
VR Video in the Browser
By Raúl Jiménez
Talk for Modern Web Event, Bilbao 2016