extern crate gfx_device_gl;
extern crate openvr_sys;
extern crate piston_window;

use gfx;
use gfx::Device;
use gfx::traits::FactoryExt;
use na;
use vr::{self, AsMatrix4, VR};
use self::piston_window::{PistonWindow, Window};

pub type ColorFormat = gfx::format::Srgba8;
pub type DepthFormat = gfx::format::DepthStencil;

// Clip planes shared by the VR and desktop projections.
const NEAR: f32 = 0.01;
const FAR: f32 = 3072.0;

gfx_constant_struct! {
    Trans {
        viewmodel: [[f32; 4]; 4] = "u_viewmodel",
        matrix: [[f32; 4]; 4] = "u_matrix",
    }
}

// NOTE: `vr::EyeBuffer` below is the assumed name of the struct returned by
// `vr::create_eyebuffer` (it must expose `target`, `depth`, and `tex` fields);
// adjust it to the actual type defined in the project's `vr` module.
pub struct ViewRoot<Dev, T, D>
    where Dev: gfx::Device,
          T: gfx::format::RenderFormat + gfx::format::TextureFormat,
          D: gfx::format::DepthFormat + gfx::format::TextureFormat
{
    left: Option<vr::EyeBuffer<Dev::Resources, T, D>>,
    right: Option<vr::EyeBuffer<Dev::Resources, T, D>>,
    trans: gfx::handle::Buffer<Dev::Resources, Trans>,
}

// Concrete parameters assume PistonWindow's default gfx_device_gl backend.
impl ViewRoot<gfx_device_gl::Device, ColorFormat, DepthFormat> {
    pub fn create_view(window: &mut PistonWindow,
                       vr: &Option<VR>)
                       -> ViewRoot<gfx_device_gl::Device, ColorFormat, DepthFormat> {
        if let &Some(ref vr) = vr {
            let render_size = vr.recommended_render_target_size();
            // Render-scale knob: currently 100% of the recommended size.
            let render_size = (render_size.0 * 100 / 100, render_size.1 * 100 / 100);
            let left = vr::create_eyebuffer(&mut window.factory, render_size.0, render_size.1)
                .expect("create left renderbuffer");
            let right = vr::create_eyebuffer(&mut window.factory, render_size.0, render_size.1)
                .expect("create right renderbuffer");
            let trans = window.factory.create_constant_buffer(1);
            window.window.swap_buffers(); // To contain setup calls to Frame 0 in apitrace
            ViewRoot {
                left: Some(left),
                right: Some(right),
                trans: trans,
            }
        } else {
            let trans = window.factory.create_constant_buffer(1);
            ViewRoot {
                left: None,
                right: None,
                trans: trans,
            }
        }
    }

    pub fn draw(&self,
                window: &mut PistonWindow,
                vr: &mut Option<VR>,
                scene: &dyn (::scene::Scene))
                -> Result<(), vr::Error> {
        if let &mut Some(ref mut vr) = vr {
            // Get the current sensor state and invert the HMD pose to obtain the view matrix.
            let poses = vr.poses().expect("vr poses");
            let mut hmd_mat = poses[0].device_to_absolute_tracking().as_matrix4();
            let inv_worked = hmd_mat.try_inverse_mut();
            assert!(inv_worked, "hmd matrix invert");

            // Render the scene once per eye into its offscreen buffer.
            for &(eye, buffers) in [(vr::Eye::Left, &self.left), (vr::Eye::Right, &self.right)]
                .iter() {
                let target = &buffers.as_ref().expect("vr color buffer").target;
                let depth = &buffers.as_ref().expect("vr depth buffer").depth;
                let proj_mat = vr.projection_matrix(eye, NEAR, FAR);
                let eye_mat = vr.head_to_eye_transform(eye);
                let scene_mat = scene.origin();
                let viewmodel_mat = eye_mat * hmd_mat * scene_mat;
                let trans = Trans {
                    viewmodel: *viewmodel_mat.as_ref(),
                    matrix: *(proj_mat * viewmodel_mat).as_ref(),
                };
                window.encoder.update_constant_buffer(&self.trans, &trans);
                scene.render(&mut window.factory,
                             &mut window.encoder,
                             &self.trans,
                             &target,
                             &depth);
            }
        } else {
            // If running without VR, draw from a default camera near the scene origin.
            let head_mat = na::Similarity3::new(na::Vector3::new(0.0, -1.5, 0.0),
                                                na::Vector3::new(0.0, 0.0, 0.0),
                                                1.0)
                .to_homogeneous();
            // Perspective3::new(aspect, fovy, znear, zfar)
            let proj_mat = na::geometry::Perspective3::new(1.0, 90.0, NEAR, FAR);
            let scene_mat = scene.origin();
            let viewmodel_mat = scene.mouselook() * head_mat * scene_mat;
            let trans = Trans {
                viewmodel: *viewmodel_mat.as_ref(),
                matrix: *(proj_mat.as_matrix() * viewmodel_mat).as_ref(),
            };
            window.encoder.update_constant_buffer(&self.trans, &trans);
        }

        // Draw the monitor window using whatever transform was last written to `trans`.
        scene.render(&mut window.factory,
                     &mut window.encoder,
                     &self.trans,
                     &window.output_color,
                     &window.output_stencil);

        window.encoder.flush(&mut window.device);

        // Submit both eye textures to the VR compositor.
        if let (&mut Some(ref mut vr), &Some(ref left), &Some(ref right)) =
            (vr, &self.left, &self.right) {
            vr.submit(vr::Eye::Left, &left.tex)?;
            vr.submit(vr::Eye::Right, &right.tex)?;
        }

        window.window.swap_buffers();
        window.device.cleanup();
        Ok(())
    }
}
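
// The sketch below shows how this module is intended to be driven from a main
// loop: create the per-eye buffers once, then call `draw` every frame. It is a
// minimal sketch, not part of this module's API; `MyScene` stands in for any
// concrete `::scene::Scene` implementation and its `new` constructor is
// hypothetical. Passing `None` for `vr` exercises the desktop fallback path.
//
// fn run(mut window: PistonWindow) {
//     let mut vr: Option<VR> = None; // Some(..) once OpenVR is initialized via the `vr` module
//     let root = ViewRoot::create_view(&mut window, &vr);
//     let scene = MyScene::new(&mut window.factory); // hypothetical scene constructor
//     while let Some(_event) = window.next() {
//         root.draw(&mut window, &mut vr, &scene).expect("draw frame");
//     }
// }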