Futuristic interfaces using web APIs


    1. Create the interfaces of the future with today's APIs
    2. I am a web opener at
    3. Two futuristic interfaces using web APIs
    4. web sockets + device orientation + some WebGL!!
    5. Architecture diagram: remote.js sends the (α, β, ɣ) angles to server.js, which relays (α, β, ɣ) to teapot.js.
    6. web sockets
    7. remote.js:
       var websocketServerUrl = 'ws://10.112.0.139:8080/';
       window.addEventListener('DOMContentLoaded', function init() {
         // init websocket connections
         // device orientation sync socket
         var ws = new WebSocket(websocketServerUrl);
         ws.onopen = function() {
           ws.opened = true;
         };
         // listen to device orientation
         window.addEventListener('deviceorientation', function(e) {
           if (ws.opened) {
             ws.send(JSON.stringify({
               alpha: e.alpha,
               beta: e.beta,
               gamma: e.gamma
             }));
           }
         });
       });
    8. server.js:
       // ws server
       var ws = require('websocket-server');
       var wsServer = ws.createServer();
       wsServer.addListener('connection', function(connection) {
         connection.addListener('message', function(msg) {
           wsServer.broadcast(msg);
         });
       });
       wsServer.listen(8080);
    9. teapot.js:
       window.addEventListener('DOMContentLoaded', function init() {
         // connect to server using websockets
         var ws = new WebSocket('ws://10.112.0.139:8080/');
         ws.onopen = function() {
           ws.onmessage = function(e) {
             var data = JSON.parse(e.data),
                 avalue = data.alpha / 180 * Math.PI,
                 bvalue = data.beta / 180 * Math.PI,
                 gvalue = data.gamma / 180 * Math.PI;
             teapot.rotation.set(gvalue, avalue, -bvalue);
           };
         };
       });
    10. socket.io
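    The slide only names socket.io as an alternative transport. A minimal sketch of the
    same broadcast server, assuming the socket.io 0.9 API that was current at the time
    (the 'orientation' event name is an assumption, not from the slides):

        // relay {alpha, beta, gamma} messages to every connected client
        var io = require('socket.io').listen(8080);

        io.sockets.on('connection', function (socket) {
          socket.on('orientation', function (data) {
            // broadcast the orientation payload, like wsServer.broadcast above
            io.sockets.emit('orientation', data);
          });
        });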
    11. device orientation
    12. remote.js:
        // listen to device orientation
        window.addEventListener('deviceorientation', function(e) {
          angles.innerHTML = 'alpha: ' + e.alpha + ', beta: ' + e.beta + ', gamma: ' + e.gamma;
          if (ws.opened) {
            ws.send(JSON.stringify({
              alpha: e.alpha,
              beta: e.beta,
              gamma: e.gamma
            }));
          }
        });
    13. Device orientation test results (source: http://lists.w3.org/Archives/Public/public-geolocation/2012Jun/0000.html).
        All Android-based results were obtained from an HTC One X running Android 4.0; all iOS-based results from an Apple iPad running iOS 5.1.
        The table compares the α, β and ɣ ranges reported by Chrome beta for Android, Firefox Mobile for Android, Opera Mobile for Android and Safari for iOS against the specification: each browser maps the angles differently.
    14. and all of that is just for one device!
    15. 1. shim: gist.github.com/2966043 (created by richtr); 2. calibration done through a UI
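    The slide mentions calibration through a UI without showing code. A minimal sketch,
    assuming a hypothetical "calibrate" button: the angles measured when the button is
    pressed become the zero reference, and later readings are reported relative to it.

        var offset = { alpha: 0, beta: 0, gamma: 0 };
        var current = { alpha: 0, beta: 0, gamma: 0 };

        // keep the latest raw angles around
        window.addEventListener('deviceorientation', function (e) {
          current = { alpha: e.alpha, beta: e.beta, gamma: e.gamma };
        });

        // 'calibrate' is an assumed button id, not one used in the demo
        document.getElementById('calibrate').addEventListener('click', function () {
          offset = { alpha: current.alpha, beta: current.beta, gamma: current.gamma };
        });

        // angles relative to the calibrated zero reference
        function calibratedAngles() {
          return {
            alpha: current.alpha - offset.alpha,
            beta:  current.beta  - offset.beta,
            gamma: current.gamma - offset.gamma
          };
        }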
    16. WebGL
    17. three.js
    18. // scene size
        var WIDTH = 724, HEIGHT = 512;
        // get the DOM element to attach to
        var container = $('container');
        // create a WebGL renderer, set its size and append it to the DOM
        var renderer = new THREE.WebGLRenderer();
        renderer.setSize(WIDTH, HEIGHT);
        renderer.setClearColorHex(0x111111, 1);
        renderer.clear();
        container.appendChild(renderer.domElement);
        // create a scene
        var scene = new THREE.Scene();
    19. // camera settings: fov, aspect ratio, near, far
        var FOV = 45, ASPECT = WIDTH / HEIGHT, NEAR = 0.1, FAR = 10000;
        // create a camera and position camera on z axis (starts at 0,0,0)
        var camera = new THREE.PerspectiveCamera(FOV, ASPECT, NEAR, FAR);
        camera.position.z = 100;
        // add the camera to the scene
        scene.add(camera);
        // create some lights, position them and add them to the scene
        var spotlight = new THREE.SpotLight();
        spotlight.position.set(170, 330, -160);
        scene.add(spotlight);
        ambilight = new THREE.AmbientLight(0x333333);
        scene.add(ambilight);
        // enable shadows on the renderer
        renderer.shadowMapEnabled = true;
    20. // add an object (teapot) to the scene
        var teapot;
        var loader = new THREE.JSONLoader(),
            createScene = function createScene(geometry) {
              var material = new THREE.MeshFaceMaterial();
              teapot = new THREE.Mesh(geometry, material);
              teapot.scale.set(8, 8, 8);
              teapot.position.set(0, -10, 0);
              scene.add(teapot);
              console.log('matrix ' + teapot.matrix);
              console.log('rotation ' + teapot.rotation.x);
            };
        loader.load('teapot-model.js', createScene);
        // draw
        renderer.render(scene, camera);
        animate();
        // animate
        function animate() {
          requestAnimationFrame(animate);
          renderer.render(scene, camera);
        }
    21. canvas 2D?
    22. getUserMedia + some WebGL!!
    23. getUserMedia
    24. <video id="camera" autoplay></video>
        var video = document.getElementById("camera");
        navigator.getUserMedia({ video: true }, function(stream) {
          video.src = window.URL.createObjectURL(stream) || stream;
        }, function() {
          // error...
        });
        ** you have to add these two lines for your code to work in all browsers:
        navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
        window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
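    The prefixed getUserMedia call above reflects the browsers of 2012. A minimal
    equivalent sketch for current browsers uses the promise-based
    navigator.mediaDevices.getUserMedia and the srcObject property, without prefixes:

        navigator.mediaDevices.getUserMedia({ video: true })
          .then(function (stream) {
            // attach the camera stream directly to the <video id="camera"> element
            document.getElementById('camera').srcObject = stream;
          })
          .catch(function (err) {
            console.error('camera access refused or unavailable', err);
          });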
    25. headtrackr.js
    26. <canvas id="inputCanvas" width="320" height="240" style="display:none"></canvas>
        <video id="inputVideo" autoplay loop></video>
        <script>
          var videoInput = document.getElementById('inputVideo');
          var canvasInput = document.getElementById('inputCanvas');
          var htracker = new headtrackr.Tracker();
          htracker.init(videoInput, canvasInput);
          htracker.start();
        </script>
    27. // set up camera controller for head-coupled perspective
        headtrackr.controllers.three.realisticAbsoluteCameraControl(camera, 27, [0, 0, 50], new THREE.Vector3(0, 0, 0), { damping: 0.5 });
        * @param {THREE.PerspectiveCamera} camera
        * @param {number} scaling  size of screen in 3d-model relative to vertical size of computer screen in real life
        * @param {array} fixedPosition  array (x, y, z) with the position of the real-life screen in the 3d-model space coordinates
        * @param {THREE.Vector3} lookAt  the object/position the camera should be pointed towards
        * @param {object} params  optional object with optional parameters
    28. document.addEventListener('headtrackingEvent', function(event) {
          scene.fog = new THREE.Fog(0x000000, 1 + (event.z * 27), 3000 + (event.z * 27));
        }, false);
        * x: position of head in cm to the right of the camera, as seen from the user's point of view (see figure)
        * y: position of head in cm above the camera (see figure)
        * z: position of head in cm distance from the camera (see figure)
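    As a rough sketch (not from the slides), the same head coordinates could also drive
    the three.js camera directly instead of going through realisticAbsoluteCameraControl,
    reusing the screen-scaling factor of 27 shown above:

        document.addEventListener('headtrackingEvent', function (event) {
          // event.x/y/z are head positions in cm relative to the webcam
          camera.position.set(event.x * 27, event.y * 27, event.z * 27);
          camera.lookAt(scene.position);
        }, false);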
    29. WebGL
    30. three.js
    31. // top wall
        plane1 = new THREE.Mesh(new THREE.PlaneGeometry(500, 3000, 5, 15), new THREE.MeshBasicMaterial({ color: 0xcccccc, wireframe: true }));
        plane1.rotation.x = Math.PI / 2;
        plane1.position.y = 250;
        plane1.position.z = 50 - 1500;
        scene.add(plane1);
    32. var geometry = new THREE.Geometry();
        geometry.vertices.push(new THREE.Vertex(new THREE.Vector3(0, 0, -80000)));
        geometry.vertices.push(new THREE.Vertex(new THREE.Vector3(0, 0, z)));
        var line = new THREE.Line(geometry, new THREE.LineBasicMaterial({ color: 0xeeeeee }));
        line.position.x = x;
        line.position.y = y;
        scene.add(line);
    33. github.com/luzc/wiimote
        auduno.github.com/headtrackr/examples/targets.html
        github.com/auduno/headtrackr
    34. shinydemos.com/touch-tracker
        github.com/operasoftware
    35. @gerbille
        github.com/luzc
        dev.opera.com
