tracking basestation and controllers

2016-09-12 19:45:51 -07:00
parent 6e3ba7f6db
commit 251ace63a7
3 changed files with 111 additions and 46 deletions

View File

@@ -12,6 +12,8 @@ memmap = "~0.2"
 gl = "*"
 gfx = "*"
 gfx_device_gl = "*"
+nalgebra = "*"
+num-traits = "*"
 openvr = { git = "https://github.com/rust-openvr/rust-openvr" }
 openvr_sys = "*"
 piston = "*"

View File

@@ -1,35 +1,48 @@
 extern crate vrtue;
 use vrtue::*;
+use vrtue::vr::AsMatrix4;
 extern crate env_logger;
 #[macro_use] extern crate gfx;
 extern crate gl;
 #[macro_use] extern crate log;
+extern crate nalgebra as na;
+extern crate num_traits;
 extern crate piston_window;

 use gfx::Device;
 use gfx::traits::FactoryExt;
+use na::Inverse;
+use num_traits::identities::One;
 use piston_window::{PistonWindow, Window, WindowSettings};

 pub type ColorFormat = gfx::format::Srgba8;
 //pub type DepthFormat = gfx::format::DepthStencil;

+const NEAR: f32 = 0.01;
+const FAR: f32 = 1000.0;
+
 gfx_defines!{
     vertex Vertex {
-        pos: [f32; 2] = "a_pos",
+        pos: [f32; 3] = "a_pos",
         color: [f32; 3] = "a_color",
     }

+    constant Trans {
+        matrix: [[f32; 4]; 4] = "u_matrix",
+    }
+
     pipeline pipe {
         vbuf: gfx::VertexBuffer<Vertex> = (),
+        trans: gfx::ConstantBuffer<Trans> = "b_trans",
         pixcolor: gfx::RenderTarget<ColorFormat> = "pixcolor",
     }
 }

 const TRIANGLE: [Vertex; 3] = [
-    Vertex { pos: [ -0.5, -0.5 ], color: [1.0, 0.0, 0.0] },
-    Vertex { pos: [ 0.5, -0.5 ], color: [0.0, 1.0, 0.0] },
-    Vertex { pos: [ 0.0, 0.5 ], color: [0.0, 0.0, 1.0] }
+    Vertex { pos: [ -0.25, -0.25, 0. ], color: [1.0, 0.0, 0.0] },
+    Vertex { pos: [ 0.25, -0.25, 0. ], color: [0.0, 1.0, 0.0] },
+    Vertex { pos: [ 0.0, 0.25, 0. ], color: [0.0, 0.0, 1.0] }
 ];

 fn main() {
@@ -41,15 +54,8 @@ fn main() {
        WindowSettings::new("Hello Virtual World!", [512; 2])
            .exit_on_esc(true)
            .vsync(false)
-           //.vsync(true)
            .build().expect("Building Window");
-    /*
-    let _sysleft = system.projection_matrix(vr::Eye::Left, 0.01, 1000.0);
-    let _eyeleft = system.eye_to_head_transform(vr::Eye::Left);
-    let _sysright = system.projection_matrix(vr::Eye::Right, 0.01, 1000.0);
-    let _eyeright = system.eye_to_head_transform(vr::Eye::Right);
-    */
     let pso = window.factory.create_pipeline_simple(VERTEX_SHADER_SRC,
                                                     FRAGMENT_SHADER_SRC,
                                                     pipe::new())
@@ -63,18 +69,24 @@ fn main() {
     let pipe_monitor = pipe::Data {
         vbuf: vertex_buffer.clone(),
+        trans: window.factory.create_constant_buffer(1),
         pixcolor: window.output_color.clone(),
     };
     let pipe_left = pipe::Data {
         vbuf: vertex_buffer.clone(),
+        trans: window.factory.create_constant_buffer(1),
         pixcolor: tgt_left,
     };
     let pipe_right = pipe::Data {
         vbuf: vertex_buffer.clone(),
+        trans: window.factory.create_constant_buffer(1),
         pixcolor: tgt_right,
     };
+    window.encoder.update_constant_buffer(
+        &pipe_monitor.trans,
+        &Trans { matrix: *na::Matrix4::one().as_ref() });

-    let mut frame = 0;
+    let mut frame = 0u32;
     window.window.swap_buffers(); frame += 1; // To contain setup calls to Frame 0 in apitrace

     'main:
     //while let Some(_) = window.next() {
@@ -83,27 +95,63 @@ fn main() {
         let _now = std::time::SystemTime::now();
         // Get the current sensor state
-        let _poses = vr.poses();
+        let poses = vr.poses();
         trace!("\t{:?} got pose", _now.elapsed());

         if frame % 90 == 0 {
-            warn!("\t#{}: poses: {:?}\n", frame, _poses.poses[0]);
+            debug!("\t#{}: poses:", frame);
+            let mut devnum = 0;
+            for track in (0..poses.count).map(|pose| poses.poses[pose]) {
+                match track.device_class() {
+                    vr::TrackedDeviceClass::Invalid => (),
+                    _ => debug!("\t\t#{}: {:?} = valid? {}, connected? {}\n\t\t\t{:?}\n\t\t\t{:?}",
+                                devnum,
+                                track.device_class(),
+                                track.is_valid,
+                                track.is_connected,
+                                track.to_device,
+                                track.velocity)
+                }
+                devnum += 1;
+            }
+            debug!("");
         }
         frame += 1;

+        let mut hmd_mat = poses.poses[0].to_device.as_matrix4();
+        hmd_mat.inverse_mut();
+
         for pass in [(Some((vr::Eye::Left, &tex_left)), &pipe_left),
                      (Some((vr::Eye::Right, &tex_right)), &pipe_right),
                      (None, &pipe_monitor),]
                     .into_iter() {
             info!("\tpass for eye: {:?}", pass.0);
-            window.encoder.clear(&pass.1.pixcolor, [0.1, 0.5, 0.1, 1.0]);
-            window.encoder.draw(&slice, &pso, pass.1);
-            window.encoder.flush(&mut window.device);
+            window.encoder.clear(&pass.1.pixcolor, [0.1, 0.2, 0.3, 1.0]);

             // Submit eye textures
             if let Some((eye, tex)) = pass.0 {
+                let proj_mat = vr.projection_matrix(eye, NEAR, FAR);
+                let eye_mat = vr.head_to_eye_transform(eye);
+                for track in (0..poses.count).map(|pose| poses.poses[pose]) {
+                    match track.device_class() {
+                        vr::TrackedDeviceClass::Controller |
+                        vr::TrackedDeviceClass::TrackingReference => {
+                            let model_mat = track.to_device.as_matrix4();
+                            let trans = Trans { matrix: *(proj_mat * eye_mat * hmd_mat * model_mat).as_ref() };
+                            window.encoder.update_constant_buffer(&pass.1.trans, &trans);
+                            window.encoder.draw(&slice, &pso, pass.1);
+                            window.encoder.flush(&mut window.device);
+                        },
+                        _ => ()
+                    }
+                }
                 vr.submit(eye, tex);
                 trace!("\t\t{:?} submit {:?}", _now.elapsed(), eye);
             } else {
+                window.encoder.draw(&slice, &pso, pass.1);
+                window.encoder.flush(&mut window.device);
                 window.window.swap_buffers();
             }
         }
@@ -118,45 +166,22 @@ fn main() {
     }
 }

-    info!("shutting down");
+    debug!("shutting down");
 }

-/*
-fn gl_debug(device: &mut gfx_device_gl::Device, msg: &'static [u8; 6]) {
-    unsafe {
-        device.with_gl_naked(|gl| {
-            gl.DebugMessageInsert(gl::DEBUG_SOURCE_APPLICATION,
-                                  gl::DEBUG_TYPE_OTHER,
-                                  0,
-                                  gl::DEBUG_SEVERITY_LOW,
-                                  msg.len() as i32,
-                                  ::std::mem::transmute(msg));
-        });
-    }
-}
-
-fn check_err(device: &mut gfx_device_gl::Device) {
-    unsafe {
-        device.with_gl_naked(|gl| {
-            let err: gl::types::GLenum = gl.GetError();
-            if err != gl::NO_ERROR {
-                panic!("GL Error! {:?}", err);
-            }
-        });
-    }
-}
-*/
 const VERTEX_SHADER_SRC: &'static [u8] = br#"
 #version 140

-in vec2 a_pos;
+in vec3 a_pos;
 in vec3 a_color;
 out vec3 v_color;

+uniform b_trans {
+    mat4 u_matrix;
+};
+
 void main() {
     v_color = a_color;
-    gl_Position = vec4(a_pos, 0.0, 1.0);
+    gl_Position = u_matrix * vec4(a_pos, 1.0);
 }
 "#;

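Not part of the commit: below is a minimal standalone sketch of the transform chain the render loop above assembles for each tracked controller and basestation, i.e. clip = projection * eye_from_head * head_from_world * world_from_device. It assumes the same pre-0.10 nalgebra API the diff already imports (Inverse::inverse_mut inverting in place and returning false on a singular matrix, Matrix4 multiplication, and as_ref() yielding &[[f32; 4]; 4]); the function and parameter names are illustrative, not from the codebase.

    // Illustrative only -- not from the commit.
    extern crate nalgebra as na;
    use na::{Inverse, Matrix4};

    // `proj` and `eye_from_head` play the role of vr.projection_matrix() and
    // vr.head_to_eye_transform() above; `hmd_pose` is poses.poses[0].to_device
    // and `device_pose` a controller/basestation pose, both via AsMatrix4.
    fn device_trans(proj: Matrix4<f32>,
                    eye_from_head: Matrix4<f32>,
                    hmd_pose: Matrix4<f32>,
                    device_pose: Matrix4<f32>) -> [[f32; 4]; 4] {
        // The HMD pose maps head space into world space, so invert it to get
        // the view matrix (head_from_world).
        let mut head_from_world = hmd_pose;
        assert!(head_from_world.inverse_mut(), "HMD pose not invertible");
        let mvp = proj * eye_from_head * head_from_world * device_pose;
        // Flatten into the [[f32; 4]; 4] layout the b_trans uniform expects.
        *mvp.as_ref()
    }

In the loop above, the equivalent product is written straight into the constant buffer via update_constant_buffer(&pass.1.trans, &trans) before each per-device draw.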
View File

@@ -1,11 +1,17 @@
 extern crate gfx;
 extern crate gfx_device_gl;
+extern crate nalgebra as na;
+extern crate num_traits;
 extern crate openvr as vr;
 extern crate openvr_sys;

 use self::gfx::{tex, Factory, Typed};
 pub use self::vr::Eye;
+pub use self::vr::tracking::TrackedDeviceClass;
+use self::na::Inverse;
+use self::num_traits::identities::Zero;
+use self::num_traits::identities::One;

 pub struct VR {
     system: vr::IVRSystem,
@@ -40,6 +46,16 @@ impl VR {
     pub fn recommended_render_target_size(&self) -> vr::common::Size {
         self.system.recommended_render_target_size()
     }
+
+    pub fn projection_matrix(self: &Self, eye: Eye, near: f32, far: f32) -> na::Matrix4<f32> {
+        self.system.projection_matrix(eye, near, far).as_matrix4()
+    }
+
+    pub fn head_to_eye_transform(self: &Self, eye: Eye) -> na::Matrix4<f32> {
+        let mut mat = self.system.eye_to_head_transform(eye).as_matrix4();
+        assert!(mat.inverse_mut(), "inverse eye matrix");
+        mat
+    }
 }

 impl Drop for VR {
@@ -48,6 +64,28 @@ impl Drop for VR {
     }
 }

+pub trait AsMatrix4<N> {
+    fn as_matrix4(self) -> na::Matrix4<N>;
+}
+
+impl<N: Copy + Zero + One> AsMatrix4<N> for [[N; 4]; 3] {
+    #[inline]
+    fn as_matrix4(self) -> na::Matrix4<N> {
+        na::Matrix4::new(self[0][0], self[0][1], self[0][2], self[0][3],
+                         self[1][0], self[1][1], self[1][2], self[1][3],
+                         self[2][0], self[2][1], self[2][2], self[2][3],
+                         N::zero(), N::zero(), N::zero(), N::one())
+    }
+}
+
+impl<N: Copy> AsMatrix4<N> for [[N; 4]; 4] {
+    #[inline]
+    fn as_matrix4(self) -> na::Matrix4<N> {
+        na::Matrix4::new(self[0][0], self[0][1], self[0][2], self[0][3],
+                         self[1][0], self[1][1], self[1][2], self[1][3],
+                         self[2][0], self[2][1], self[2][2], self[2][3],
+                         self[3][0], self[3][1], self[3][2], self[3][3])
+    }
+}
+
 pub fn create_eyebuffer<T>(factory: &mut gfx_device_gl::Factory,
                            size: vr::common::Size)
                            -> Result<(gfx::handle::Texture<gfx_device_gl::Resources,