model-converter/src/programs/viewer.rs

extern crate serde;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate clap;
extern crate glium;
extern crate model_converter;
extern crate stderrlog;
use std::fs::File;
use std::io::Write;
use std::process::exit;
use std::thread::sleep;
use std::time::{Duration, Instant};
use clap::{App, Arg};
use glium::glutin;
use glium::glutin::event_loop::EventLoop;
use glium::glutin::window::WindowBuilder;
use glium::Display;
use glium::glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use model_converter::camera::Camera;
use model_converter::controls::{FirstPersonControls, OrbitControls};
use model_converter::math::bounding_box::BoundingBox3;
use model_converter::math::vector::Vector3;
use model_converter::parser::parse_file;
use model_converter::renderer::Renderer;
use model_converter::scene::Scene;
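
/// Returns the number of whole milliseconds in a `Duration`.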
fn as_millis(duration: Duration) -> u64 {
    duration.as_secs() * 1_000 + (duration.subsec_nanos() as u64) / 1_000_000
}
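
/// Serializable 3D vector, used when writing recorded camera paths to JSON.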
#[derive(Serialize, Deserialize)]
struct Vector {
    x: f64,
    y: f64,
    z: f64,
}
impl From<Vector3<f64>> for Vector {
    fn from(v: Vector3<f64>) -> Vector {
        Vector {
            x: v[0],
            y: v[1],
            z: v[2],
        }
    }
}
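
/// One recorded camera sample: position and target, in world coordinates.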
#[derive(Serialize, Deserialize)]
struct CameraEvent {
    position: Vector,
    target: Vector,
}
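
/// Parses the input models, uploads them to the GPU and runs the interactive viewer.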
fn main() {
    let matches = App::new("3D Viewer")
        .version("1.0")
        .arg(
            Arg::with_name("input")
                .short("i")
                .long("input")
                .value_name("FILES")
                .takes_value(true)
                .multiple(true)
                .help("Input model files")
                .required(true),
        )
        .arg(
            Arg::with_name("first person")
                .short("f")
                .long("first-person")
                .help("Uses first person controls instead of orbit controls"),
        )
        .arg(
            Arg::with_name("verbose")
                .short("v")
                .long("verbose")
                .multiple(true)
                .help("Shows logs during the parsing of the model"),
        )
        .get_matches();
    // Initialize logging to stderr; each -v flag increases the verbosity.
    stderrlog::new()
        .module(module_path!())
        .verbosity(matches.occurrences_of("verbose") as usize)
        .init()
        .expect("Couldn't initialize logger");
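    // Counters for numbering saved screenshots and recorded camera paths;
    // `path` holds the camera samples of the current recording.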
    let mut capture_count = 0;
    let mut path_count = 0;
    let mut path = vec![];
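    // Start from an inverted bounding box so the first union with a model's
    // bounding box simply yields that box.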
    use std::f64::{MAX, MIN};
    let mut bbox = BoundingBox3::new(Vector3::new(MAX, MAX, MAX), Vector3::new(MIN, MIN, MIN));
    let mut models = vec![];
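    // Parse every input file, accumulating the global bounding box over all models.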
    for input in matches.values_of("input").unwrap() {
        info!("Parsing model {}", input);
        match parse_file(&input) {
            Ok(model) => {
                if model.vertices.len() > 0 {
                    bbox = bbox.union(&model.bounding_box());
                }
                models.push((input.to_owned(), model))
            }
            Err(e) => {
                error!("Error while parsing file: {}", e);
                exit(1);
            }
        }
    }
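    // Create the event loop and a hidden window (shown once the scene is ready),
    // with a 24-bit depth buffer for the 3D rendering.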
    let event_loop = EventLoop::new();
    let window = WindowBuilder::new().with_visible(false);
    let context = glutin::ContextBuilder::new().with_depth_buffer(24);
    let display = Display::new(window, context, &event_loop).unwrap();
    let mut renderer = Renderer::new(display);
    let mut scene = Scene::new();
    let mut before;
    let mut duration;
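    // Center and scale every model inside the global bounding box, then build its
    // textures and vertex buffers on the GPU before adding it to the scene.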
    for (name, mut model) in models {
        info!("Scaling model {}...", name);
        model.center_and_scale_from_box(&bbox);
        info!("Building textures for model {}...", name);
        before = Instant::now();
        model.build_textures(&renderer);
        duration = Instant::now().duration_since(before);
        info!("Done in {}ms.", as_millis(duration));
        info!("Building vertex buffers for model {}...", name);
        before = Instant::now();
        model.build_vertex_buffers(&renderer);
        duration = Instant::now().duration_since(before);
        scene.emplace(model);
        info!("Done in {}ms.", as_millis(duration));
        info!("Finished");
    }
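    // World-space center and size of the scene; the camera works in normalized
    // coordinates, so these are used to map positions back to world space when
    // logging or recording camera paths.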
    let center = (bbox.min() + bbox.max()) / 2.0;
    let size = (bbox.max() - bbox.min()).norm();
    let center_f64 = Vector3::new(center.x() as f64, center.y() as f64, center.z() as f64);
    let size_f64 = size as f64;
    let mut camera = Camera::new(
        Vector3::new(0.0, 0.0, 0.0),
        Vector3::new(0.0, 0.0, 0.0),
        Vector3::new(0.0, 1.0, 0.0),
    );
    camera.z_near = 0.0001;
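    // Choose the control scheme: first-person if requested, orbit controls otherwise.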
    use model_converter::controls::Controls;
    let mut controls: Box<dyn Controls> = if matches.is_present("first person") {
        Box::new(FirstPersonControls::new())
    } else {
        Box::new(OrbitControls::new(
            Vector3::new(0.0, 0.0, 0.0),
            1.0,
            &mut camera,
        ))
    };
    renderer.show();
    let mut recording = false;
    let mut before = Instant::now();
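    // Main loop: forward events to the controls, handle the key bindings
    // (Escape quits, R toggles path recording, Return logs the camera,
    // C takes a screenshot), render the scene, then throttle to ~50 FPS.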
    event_loop.run(move |ev, _, control_flow| {
        let mut should_screenshot = false;
        controls.manage_event(&ev, &mut camera, &renderer);
        match ev {
            // Only fall through (and render) on timer wake-ups and the initial
            // event; any other wake-up cause skips this iteration.
            Event::NewEvents(cause) => match cause {
                glium::glutin::event::StartCause::ResumeTimeReached { .. } => (),
                glium::glutin::event::StartCause::Init => (),
                _ => return,
            },
            // Close window
            Event::WindowEvent {
                event: WindowEvent::CloseRequested,
                ..
            } => *control_flow = glutin::event_loop::ControlFlow::Exit,
            // Escape key
            Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                virtual_keycode: Some(VirtualKeyCode::Escape),
                                state: ElementState::Pressed,
                                ..
                            },
                        ..
                    },
                ..
            } => *control_flow = glutin::event_loop::ControlFlow::Exit,
            // R key: toggle camera-path recording; when recording stops, the
            // recorded path is written to a numbered JSON file.
            Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                virtual_keycode: Some(VirtualKeyCode::R),
                                state: ElementState::Pressed,
                                ..
                            },
                        ..
                    },
                ..
            } => {
                if !recording {
                    path.clear();
                    recording = true;
                } else {
                    recording = false;
                    let string = serde_json::to_string(&path).unwrap();
                    let mut file = File::create(format!("path-{}.json", path_count)).unwrap();
                    file.write_all(string.as_bytes()).unwrap();
                    path_count += 1;
                }
            }
            // Enter key: log the camera position, target and up vector in world coordinates.
            Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                virtual_keycode: Some(VirtualKeyCode::Return),
                                state: ElementState::Pressed,
                                ..
                            },
                        ..
                    },
                ..
            } => {
                trace!("Camera:");
                let world_position = camera.position * size_f64 + center_f64;
                let world_target = camera.target * size_f64 + center_f64;
                trace!(
                    "\tPosition: ({}, {}, {})",
                    world_position.x(),
                    world_position.y(),
                    world_position.z()
                );
                trace!(
                    "\tTarget: ({}, {}, {})",
                    world_target.x(),
                    world_target.y(),
                    world_target.z()
                );
                trace!(
                    "\tUp: ({}, {}, {})",
                    camera.up.x(),
                    camera.up.y(),
                    camera.up.z()
                );
            }
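            // C key: request a screenshot of this frame.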
            Event::WindowEvent {
                event:
                    WindowEvent::KeyboardInput {
                        input:
                            KeyboardInput {
                                virtual_keycode: Some(VirtualKeyCode::C),
                                state: ElementState::Pressed,
                                ..
                            },
                        ..
                    },
                ..
            } => should_screenshot = true,
            _ => (),
        }
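        // Update the controls and draw the scene with the current camera.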
        controls.update(&mut camera, &renderer);
        renderer.render(&scene, &camera);
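        // While recording, append the current camera pose (in world coordinates) to the path.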
        if recording {
            let position = camera.position * size_f64 + center_f64;
            let target = camera.target * size_f64 + center_f64;
            path.push(CameraEvent {
                position: position.into(),
                target: target.into(),
            });
        }
        if should_screenshot {
            // Make a screenshot of the current frame and save it as a numbered PNG.
            let cap = renderer.capture();
            cap.save(format!("capture-{}.png", capture_count)).unwrap();
            capture_count += 1;
        }
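        // Throttle the loop to roughly 50 FPS (at most one frame every 20 ms).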
        let elapsed = as_millis(Instant::now().duration_since(before));
        if elapsed < 20 {
            sleep(Duration::from_millis(20 - elapsed));
        }
        before = Instant::now();
    });
}