diff --git a/src/bin/gl.rs b/src/bin/gl.rs
index 3b7718b..26d600b 100644
--- a/src/bin/gl.rs
+++ b/src/bin/gl.rs
@@ -23,167 +23,167 @@ const NEAR: f32 = 0.01;
 const FAR: f32 = 1000.0;
 
 gfx_defines!{
-    vertex Vertex {
-        pos: [f32; 3] = "a_pos",
-        uv: [f32; 2] = "a_uv",
-    }
+    vertex Vertex {
+        pos: [f32; 3] = "a_pos",
+        uv: [f32; 2] = "a_uv",
+    }
 
-    constant Trans {
-        matrix: [[f32; 4]; 4] = "u_matrix",
-    }
+    constant Trans {
+        matrix: [[f32; 4]; 4] = "u_matrix",
+    }
 
-    pipeline pipe {
-        vbuf: gfx::VertexBuffer = (),
-        trans: gfx::ConstantBuffer = "b_trans",
-        tiles: gfx::TextureSampler<[f32; 4]> = "t_tiles",
-        pixcolor: gfx::RenderTarget = "pixcolor",
-        depth: gfx::DepthTarget = gfx::preset::depth::LESS_EQUAL_WRITE,
-    }
+    pipeline pipe {
+        vbuf: gfx::VertexBuffer = (),
+        trans: gfx::ConstantBuffer = "b_trans",
+        tiles: gfx::TextureSampler<[f32; 4]> = "t_tiles",
+        pixcolor: gfx::RenderTarget = "pixcolor",
+        depth: gfx::DepthTarget = gfx::preset::depth::LESS_EQUAL_WRITE,
+    }
 }
 
 const POLYGON: [Vertex; 4] = [
-    Vertex { pos: [ -0.25, -0.25, 0. ], uv: [0., 0.] },
-    Vertex { pos: [ 0.25, -0.25, 0. ], uv: [1., 0.] },
-    Vertex { pos: [ 0.25, 0.25, 0. ], uv: [1., 1.] },
-    Vertex { pos: [ -0.25, 0.25, 0. ], uv: [0., 1.] }
+    Vertex { pos: [ -0.25, -0.25, 0. ], uv: [0., 0.] },
+    Vertex { pos: [ 0.25, -0.25, 0. ], uv: [1., 0.] },
+    Vertex { pos: [ 0.25, 0.25, 0. ], uv: [1., 1.] },
+    Vertex { pos: [ -0.25, 0.25, 0. ], uv: [0., 1.] }
 ];
 
 const POLYGON_IDX: &'static [u16] = &[ 0, 1, 2, 2, 3, 0 ];
 
 fn main() {
-    env_logger::init().expect("env logger");
-    let mut vr = vr::VR::new().expect("VR init");
-    let render_size = vr.recommended_render_target_size();
+    env_logger::init().expect("env logger");
+    let mut vr = vr::VR::new().expect("VR init");
+    let render_size = vr.recommended_render_target_size();
 
-    let mut window: PistonWindow =
-        WindowSettings::new("Hello Virtual World!", [512; 2])
-        .exit_on_esc(true)
-        .vsync(false)
+    let mut window: PistonWindow =
+        WindowSettings::new("Hello Virtual World!", [512; 2])
+        .exit_on_esc(true)
+        .vsync(false)
         .build().expect("Building Window");
-    let pso = window.factory.create_pipeline_simple(VERTEX_SHADER_SRC,
-                                                    FRAGMENT_SHADER_SRC,
-                                                    pipe::new())
-        .expect("create pipeline");
+    let pso = window.factory.create_pipeline_simple(VERTEX_SHADER_SRC,
+                                                    FRAGMENT_SHADER_SRC,
+                                                    pipe::new())
+        .expect("create pipeline");
 
-    let (tex_left, tgt_left, depth_left) = vr::create_eyebuffer(&mut window.factory, render_size)
-        .expect("create left renderbuffer");
-    let (tex_right, tgt_right, depth_right) = vr::create_eyebuffer(&mut window.factory, render_size)
-        .expect("create right renderbuffer");
-    let (vertex_buffer, slice) = window.factory.create_vertex_buffer_with_slice(&POLYGON, POLYGON_IDX);
+    let (tex_left, tgt_left, depth_left) = vr::create_eyebuffer(&mut window.factory, render_size)
+        .expect("create left renderbuffer");
+    let (tex_right, tgt_right, depth_right) = vr::create_eyebuffer(&mut window.factory, render_size)
+        .expect("create right renderbuffer");
+    let (vertex_buffer, slice) = window.factory.create_vertex_buffer_with_slice(&POLYGON, POLYGON_IDX);
 
-    let tiles = tile::get_tiles::<_, _, ColorFormat>(&mut window.factory);
-    let nn_sampler = window.factory.create_sampler(
-        tex::SamplerInfo::new(tex::FilterMethod::Scale,
-                              tex::WrapMode::Clamp));
+    let tiles = tile::get_tiles::<_, _, ColorFormat>(&mut window.factory);
+    let nn_sampler = window.factory.create_sampler(
+        tex::SamplerInfo::new(tex::FilterMethod::Scale,
+                              tex::WrapMode::Clamp));
 
-    let pipe_monitor = pipe::Data { 
- vbuf: vertex_buffer.clone(), - trans: window.factory.create_constant_buffer(1), - tiles: (tiles.clone(), nn_sampler.clone()), - pixcolor: window.output_color.clone(), - depth: window.output_stencil.clone(), - }; - let pipe_left = pipe::Data { - vbuf: vertex_buffer.clone(), - trans: window.factory.create_constant_buffer(1), - tiles: (tiles.clone(), nn_sampler.clone()), - pixcolor: tgt_left, - depth: depth_left, - }; - let pipe_right = pipe::Data { - vbuf: vertex_buffer.clone(), - trans: window.factory.create_constant_buffer(1), - tiles: (tiles.clone(), nn_sampler.clone()), - pixcolor: tgt_right, - depth: depth_right, - }; - window.encoder.update_constant_buffer( - &pipe_monitor.trans, - &Trans { matrix: *na::Matrix4::one().as_ref() }); + let pipe_monitor = pipe::Data { + vbuf: vertex_buffer.clone(), + trans: window.factory.create_constant_buffer(1), + tiles: (tiles.clone(), nn_sampler.clone()), + pixcolor: window.output_color.clone(), + depth: window.output_stencil.clone(), + }; + let pipe_left = pipe::Data { + vbuf: vertex_buffer.clone(), + trans: window.factory.create_constant_buffer(1), + tiles: (tiles.clone(), nn_sampler.clone()), + pixcolor: tgt_left, + depth: depth_left, + }; + let pipe_right = pipe::Data { + vbuf: vertex_buffer.clone(), + trans: window.factory.create_constant_buffer(1), + tiles: (tiles.clone(), nn_sampler.clone()), + pixcolor: tgt_right, + depth: depth_right, + }; + window.encoder.update_constant_buffer( + &pipe_monitor.trans, + &Trans { matrix: *na::Matrix4::one().as_ref() }); - let mut frame = 0u32; - window.window.swap_buffers(); frame += 1; // To contain setup calls to Frame 0 in apitrace - 'main: - //while let Some(_) = window.next() { - loop { - info!("Frame #{}", frame); - let _now = std::time::SystemTime::now(); + let mut frame = 0u32; + window.window.swap_buffers(); frame += 1; // To contain setup calls to Frame 0 in apitrace + 'main: + //while let Some(_) = window.next() { + loop { + info!("Frame #{}", frame); + let _now = std::time::SystemTime::now(); - // Get the current sensor state - let poses = vr.poses(); - trace!("\t{:?} got pose", _now.elapsed()); - if frame % 90 == 0 { - debug!("\t#{}: poses:", frame); - let mut devnum = 0; - for track in (0..poses.count).map(|pose| poses.poses[pose]) { - match track.device_class() { - vr::TrackedDeviceClass::Invalid => (), - _ => debug!("\t\t#{}: {:?} = valid? {}, connected? 
{}\n\t\t\t{:?}\n\t\t\t{:?}", - devnum, - track.device_class(), - track.is_valid, - track.is_connected, - track.to_device, - track.velocity) - } - devnum += 1; - } - debug!(""); - } - frame += 1; - - let mut hmd_mat = poses.poses[0].to_device.as_matrix4(); - hmd_mat.inverse_mut(); - - for pass in [(Some((vr::Eye::Left, &tex_left)), &pipe_left), - (Some((vr::Eye::Right, &tex_right)), &pipe_right), - (None, &pipe_monitor),] - .into_iter() { - info!("\tpass for eye: {:?}", pass.0); - window.encoder.clear(&pass.1.pixcolor, [0.1, 0.2, 0.3, 1.0]); - window.encoder.clear_depth(&pass.1.depth, 1.0); - - // Submit eye textures - if let Some((eye, tex)) = pass.0 { - let proj_mat = vr.projection_matrix(eye, NEAR, FAR); - let eye_mat = vr.head_to_eye_transform(eye); - for track in (0..poses.count).map(|pose| poses.poses[pose]) { - match track.device_class() { - vr::TrackedDeviceClass::Controller | - vr::TrackedDeviceClass::TrackingReference => { - let model_mat = track.to_device.as_matrix4(); - - let trans = Trans { matrix: *(proj_mat * eye_mat * hmd_mat * model_mat).as_ref() }; - window.encoder.update_constant_buffer(&pass.1.trans, &trans); - - window.encoder.draw(&slice, &pso, pass.1); - window.encoder.flush(&mut window.device); - }, - _ => () - } - } - - vr.submit(eye, tex); - trace!("\t\t{:?} submit {:?}", _now.elapsed(), eye); - } else { - window.encoder.draw(&slice, &pso, pass.1); - window.encoder.flush(&mut window.device); - window.window.swap_buffers(); - } - } - window.device.cleanup(); - - // handle window events - while let Some(ev) = window.poll_event() { - match ev { - piston_window::Input::Text(_) => break 'main, - _ => debug!("\t{:?}", ev) - } + // Get the current sensor state + let poses = vr.poses(); + trace!("\t{:?} got pose", _now.elapsed()); + if frame % 90 == 0 { + debug!("\t#{}: poses:", frame); + let mut devnum = 0; + for track in (0..poses.count).map(|pose| poses.poses[pose]) { + match track.device_class() { + vr::TrackedDeviceClass::Invalid => (), + _ => debug!("\t\t#{}: {:?} = valid? {}, connected? 
{}\n\t\t\t{:?}\n\t\t\t{:?}", + devnum, + track.device_class(), + track.is_valid, + track.is_connected, + track.to_device, + track.velocity) } + devnum += 1; + } + debug!(""); } + frame += 1; - debug!("shutting down"); + let mut hmd_mat = poses.poses[0].to_device.as_matrix4(); + hmd_mat.inverse_mut(); + + for pass in [(Some((vr::Eye::Left, &tex_left)), &pipe_left), + (Some((vr::Eye::Right, &tex_right)), &pipe_right), + (None, &pipe_monitor),] + .into_iter() { + info!("\tpass for eye: {:?}", pass.0); + window.encoder.clear(&pass.1.pixcolor, [0.1, 0.2, 0.3, 1.0]); + window.encoder.clear_depth(&pass.1.depth, 1.0); + + // Submit eye textures + if let Some((eye, tex)) = pass.0 { + let proj_mat = vr.projection_matrix(eye, NEAR, FAR); + let eye_mat = vr.head_to_eye_transform(eye); + for track in (0..poses.count).map(|pose| poses.poses[pose]) { + match track.device_class() { + vr::TrackedDeviceClass::Controller | + vr::TrackedDeviceClass::TrackingReference => { + let model_mat = track.to_device.as_matrix4(); + + let trans = Trans { matrix: *(proj_mat * eye_mat * hmd_mat * model_mat).as_ref() }; + window.encoder.update_constant_buffer(&pass.1.trans, &trans); + + window.encoder.draw(&slice, &pso, pass.1); + window.encoder.flush(&mut window.device); + }, + _ => () + } + } + + vr.submit(eye, tex); + trace!("\t\t{:?} submit {:?}", _now.elapsed(), eye); + } else { + window.encoder.draw(&slice, &pso, pass.1); + window.encoder.flush(&mut window.device); + window.window.swap_buffers(); + } + } + window.device.cleanup(); + + // handle window events + while let Some(ev) = window.poll_event() { + match ev { + piston_window::Input::Text(_) => break 'main, + _ => debug!("\t{:?}", ev) + } + } + } + + debug!("shutting down"); } const VERTEX_SHADER_SRC: &'static [u8] = br#"