Compare commits


1 Commit

Author: Thomas Forgione
SHA1: ebb55da52c
Message: Nice
Date: 2019-10-03 16:25:28 +02:00
7 changed files with 369 additions and 370 deletions

View File

@ -7,8 +7,8 @@ authors = ["Thomas Forgione <thomas@tforgione.fr>"]
log = "0.4"
stderrlog = "0.4.1"
num = "0.1.42"
glium = "0.32.1"
image = "0.23"
glium = "0.22.0"
image = "0.21.2"
byteorder = "1.2.3"
clap = "2.31.2"
# nalgebra = "0.16.13"

View File

@ -1,31 +1,75 @@
#version 140
#extension GL_OES_standard_derivatives : enable
uniform sampler2D tex;
uniform vec3 diffuse;
in vec3 v_normal;
in vec2 v_tex_coords;
in vec3 v_barycentric;
in vec4 v_position;
out vec4 color;
vec4 wireframe_color;
vec4 render_color;
vec3 ambientLight = vec3(0.3,0.3,0.3);
vec3 directionnalLight = normalize(vec3(10,5,7));
vec3 directionnalLightFactor = vec3(0.6,0.6,0.6);
// float edgeFactor(vec3 a){
// vec3 d = fwidth(v_barycentric);
// vec3 a3 = smoothstep(vec3(0.0), d*1.5, v_barycentric);
// return min(min(a3.x, a3.y), a3.z);
// }
void main() {
vec3 d = fwidth(v_barycentric);
vec3 a3 = smoothstep(vec3(0.0), 0.8 * d, v_barycentric);
float scale = (1 - min(min(a3.x, a3.y), a3.z)) / 2 + 0.5;
wireframe_color = vec4(scale, scale, scale, 1.0);
// float threshold = 0.1;
// vec3 d = fwidth(v_barycentric);
// if (d.x < threshold || d.y < threshold || d.z < threshold) {
// wireframe_color = vec4(1.0, 1.0, 1.0, 1.0);
// } else {
// wireframe_color = vec4(0.5, 0.5, 0.5, 1.0);
// }
vec3 lambertComponent = dot(directionnalLight, v_normal) * directionnalLightFactor;
lambertComponent = max(vec3(0.0, 0.0, 0.0), lambertComponent);
vec4 factor = vec4(ambientLight + lambertComponent, 1.0);
color = factor * vec4(diffuse, 1.0) * texture(tex, v_tex_coords);
render_color = factor * vec4(diffuse, 1.0) * texture(tex, v_tex_coords);
if (color.a < 0.05) {
if (render_color.a < 0.05) {
discard;
} else {
color.a = 1.0;
render_color.a = 1.0;
}
float z_min = 0.15;
float z_max = 0.25;
float lambda = (v_position.z - z_min) / (z_max - z_min);
if (lambda < 0) {
lambda = 0;
}
if (lambda > 1) {
lambda = 1;
}
color = lambda * wireframe_color + (1 - lambda) * render_color;
}

View File

@ -6,14 +6,19 @@ uniform vec3 texture_size;
in vec3 vertex;
in vec2 tex_coords;
in vec3 barycentric;
in vec3 normal;
out vec2 v_tex_coords;
out vec3 v_normal;
out vec3 v_barycentric;
out vec4 v_position;
void main() {
v_normal = normal;
v_barycentric = barycentric;
v_tex_coords = vec2(tex_coords.x * texture_size.x, tex_coords.y * texture_size.y);
gl_Position = perspective * view * vec4(vertex, 1.0);
v_position = gl_Position;
}

View File

@ -2,20 +2,30 @@
const EPSILON: f64 = 0.001;
use glium::glutin::dpi::{PhysicalPosition, PhysicalSize};
use glium::glutin::event::{
ElementState, Event, KeyboardInput, MouseButton, MouseScrollDelta, VirtualKeyCode, WindowEvent,
use glium::glutin::{
Event,
WindowEvent,
ElementState,
MouseButton,
MouseScrollDelta,
KeyboardInput,
VirtualKeyCode,
};
use glium::glutin::dpi::{
LogicalSize,
LogicalPosition,
};
use camera::Camera;
use math::vector::{Vector2, Vector3};
use model::Model;
use camera::Camera;
use renderer::Renderer;
use model::Model;
/// The trait that all controls should implement.
pub trait Controls {
/// Modifies the camera depending on the event.
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, renderer: &Renderer);
fn manage_event(&mut self, event: &Event, camera: &mut Camera, renderer: &Renderer);
/// Updates the camera depending on time.
fn update(&mut self, camera: &mut Camera, renderer: &Renderer);
@ -48,6 +58,7 @@ pub struct OrbitControls {
}
impl OrbitControls {
/// Creates a new orbit controls, and initializes the camera.
pub fn new(center: Vector3<f64>, distance: f64, camera: &mut Camera) -> OrbitControls {
let controls = OrbitControls {
@ -79,40 +90,35 @@ impl OrbitControls {
OrbitControls::new(
Vector3::new(center.x() as f64, center.y() as f64, center.z() as f64),
distance as f64,
camera,
camera
)
}
}
impl Controls for OrbitControls {
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, _: &Renderer) {
fn manage_event(&mut self, event: &Event, camera: &mut Camera, _: &Renderer) {
match *event {
Event::WindowEvent {
event:
WindowEvent::MouseInput {
button: MouseButton::Left,
state,
..
},
..
event: WindowEvent::MouseInput {
button: MouseButton::Left,
state, ..
}, ..
} => {
self.pressed = state == ElementState::Pressed;
}
},
Event::WindowEvent {
event: WindowEvent::Resized(PhysicalSize { width, height }),
..
event: WindowEvent::Resized(LogicalSize { width, height }), ..
} => {
camera.aspect_ratio = width as f64 / height as f64;
}
},
Event::WindowEvent {
event:
WindowEvent::MouseWheel {
delta: MouseScrollDelta::LineDelta(_, y),
..
},
..
event: WindowEvent::MouseWheel {
delta: MouseScrollDelta::LineDelta(_, y), ..
}, ..
} => {
self.distance -= y as f64 / self.sensitivity;
@ -122,15 +128,12 @@ impl Controls for OrbitControls {
camera.position += self.center;
camera.target = self.center;
}
},
Event::WindowEvent {
event:
WindowEvent::CursorMoved {
position: PhysicalPosition { x, y },
..
},
..
Event::WindowEvent{
event: WindowEvent::CursorMoved {
position: LogicalPosition { x, y }, ..
}, ..
} => {
let current_position = Vector2::new(x as f64, y as f64);
@ -138,11 +141,11 @@ impl Controls for OrbitControls {
let difference = (current_position - self.mouse_position) / self.sensitivity;
self.theta += difference.x();
self.phi += difference.y();
self.phi += difference.y();
use std::f64::consts::PI;
self.phi = self.phi.max(-PI / 2.0 + EPSILON);
self.phi = self.phi.min(PI / 2.0 - EPSILON);
self.phi = self.phi.max(- PI/2.0 + EPSILON);
self.phi = self.phi.min( PI/2.0 - EPSILON);
*camera.position.x_mut() = self.distance * self.phi.cos() * self.theta.cos();
*camera.position.y_mut() = self.distance * self.phi.sin();
@ -150,21 +153,25 @@ impl Controls for OrbitControls {
camera.position += self.center;
camera.target = self.center;
}
// Record new position
self.mouse_position = current_position;
}
},
_ => (),
}
}
fn update(&mut self, _: &mut Camera, _: &Renderer) {}
fn update(&mut self, _: &mut Camera, _: &Renderer) {
}
}
/// First person controls, just like in video games.
pub struct FirstPersonControls {
/// Theta angle of the spheric coordinates of the direction of the camera.
theta: f64,
@ -229,121 +236,90 @@ impl FirstPersonControls {
}
impl Controls for FirstPersonControls {
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, renderer: &Renderer) {
fn manage_event(&mut self, event: &Event, camera: &mut Camera, renderer: &Renderer) {
match *event {
// On resize window
Event::WindowEvent {
event: WindowEvent::Resized(PhysicalSize { width, height }),
..
event: WindowEvent::Resized(LogicalSize { width, height } ), ..
} => {
camera.aspect_ratio = width as f64 / height as f64;
}
},
// On Z pressed
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Z),
state,
..
},
..
},
..
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Z), state, ..
}, ..
}, ..
} => {
self.forward_pressed = state == ElementState::Pressed;
}
},
// On S pressed
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::S),
state,
..
},
..
},
..
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::S), state, ..
}, ..
}, ..
} => {
self.backward_pressed = state == ElementState::Pressed;
}
},
// On Q pressed
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Q),
state,
..
},
..
},
..
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Q), state, ..
}, ..
}, ..
} => {
self.left_pressed = state == ElementState::Pressed;
}
},
// On D pressed
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::D),
state,
..
},
..
},
..
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::D), state, ..
}, ..
}, ..
} => {
self.right_pressed = state == ElementState::Pressed;
}
},
// On Space pressed
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Space),
state,
..
},
..
},
..
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Space), state, ..
}, ..
}, ..
} => {
self.boost = state == ElementState::Pressed;
}
},
// On mouse move
Event::WindowEvent {
event:
WindowEvent::CursorMoved {
position: PhysicalPosition { x, y },
..
},
..
event: WindowEvent::CursorMoved {
position: LogicalPosition { x, y }, ..
}, ..
} => {
let size = renderer.gl_window().window().inner_size();
let size = renderer.gl_window().window().get_inner_size().unwrap();
let center = Vector2::new(size.width as f64 / 2.0, size.height as f64 / 2.0);
let current_position = Vector2::new(x as f64, y as f64);
let difference = (current_position - center) / self.sensitivity;
self.theta += difference.x();
self.phi -= difference.y();
self.phi -= difference.y();
use std::f64::consts::PI;
self.phi = self.phi.max(-PI / 2.0 + EPSILON);
self.phi = self.phi.min(PI / 2.0 - EPSILON);
self.phi = self.phi.max(- PI/2.0 + EPSILON);
self.phi = self.phi.min( PI/2.0 - EPSILON);
self.forward = Vector3::new(
self.phi.cos() * self.theta.cos(),
@ -351,29 +327,29 @@ impl Controls for FirstPersonControls {
self.phi.cos() * self.theta.sin(),
);
self.left = Vector3::new(0.0, 1.0, 0.0)
.cross_product(self.forward)
.normalized();
self.left = Vector3::new(0.0, 1.0, 0.0).cross_product(self.forward).normalized();
// Move the cursor back to the center
renderer
.gl_window()
.window()
.set_cursor_position(PhysicalPosition::new(
size.width as f64 / 2.0,
size.height as f64 / 2.0,
))
.set_cursor_position(LogicalPosition::new(
size.width / 2.0, size.height / 2.0))
.unwrap();
}
},
_ => (),
}
self.update_camera(camera);
}
fn update(&mut self, camera: &mut Camera, renderer: &Renderer) {
renderer.gl_window().window().set_cursor_visible(false);
renderer.gl_window().hide_cursor(true);
let mut speed = Vector3::new(0.0, 0.0, 0.0);

View File

@ -23,13 +23,14 @@ use renderer::Renderer;
#[derive(Copy, Clone, Debug, PartialEq)]
/// A raw vertex, with its 3D coordinates, texture coordinates and normals.
pub struct Vertex {
vertex: [f64; 3],
tex_coords: [f64; 2],
normal: [f64; 3],
face_color: [f64; 3],
vertex: [f64; 3],
tex_coords: [f64; 2],
normal: [f64; 3],
barycentric: [f64; 3],
face_color: [f64; 3],
}
implement_vertex!(Vertex, vertex, tex_coords, normal, face_color);
implement_vertex!(Vertex, vertex, tex_coords, normal, barycentric, face_color);
/// A part of a 3D model.
///
@ -534,7 +535,12 @@ impl Model {
let mut vertex_buffer = vec![];
for face in part.faces() {
for &&v in &[&face.a, &face.b, &face.c] {
let v0 = vertices[face.a.vertex];
let v1 = vertices[face.b.vertex];
let v2 = vertices[face.c.vertex];
let barycenter = (v0 + v1 + v2) / 3.0;
for (index, &&v) in [&face.a, &face.b, &face.c].iter().enumerate() {
let vertex = vertices[v.vertex].into();
let tex_coord = if let Some(tex_index) = v.texture_coordinate {
texture_coordinates[tex_index].into()
@ -542,6 +548,14 @@ impl Model {
[0.0, 0.0]
};
let barycentric = match index {
0 => [1.0, 0.0, 0.0],
1 => [0.0, 1.0, 0.0],
2 => [0.0, 0.0, 1.0],
_ => unreachable!(),
};
let normal = if let Some(normal_index) = v.normal {
normals[normal_index].into()
} else {
@ -564,10 +578,11 @@ impl Model {
];
vertex_buffer.push(Vertex {
vertex: vertex,
tex_coords: tex_coord,
normal: normal,
face_color: [r, g, b],
vertex: vertex,
tex_coords: tex_coord,
normal: normal,
barycentric: barycentric,
face_color: [r, g, b],
});
}
}

View File

@ -6,33 +6,37 @@ extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate stderrlog;
extern crate clap;
extern crate glium;
extern crate model_converter;
extern crate stderrlog;
use std::fs::File;
use std::io::Write;
use std::process::exit;
use std::time::{Instant, Duration};
use std::thread::sleep;
use std::time::{Duration, Instant};
use clap::{App, Arg};
use glium::glutin;
use glium::glutin::event_loop::EventLoop;
use glium::glutin::window::WindowBuilder;
use glium::Display;
use glium::glutin;
use glium::glutin::{EventsLoop, WindowBuilder};
use glium::glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use glium::glutin::{
Event,
WindowEvent,
VirtualKeyCode,
ElementState,
};
use model_converter::camera::Camera;
use model_converter::controls::{FirstPersonControls, OrbitControls};
use model_converter::scene::Scene;
use model_converter::math::bounding_box::BoundingBox3;
use model_converter::math::vector::Vector3;
use model_converter::parser::parse_file;
use model_converter::renderer::Renderer;
use model_converter::scene::Scene;
use model_converter::controls::{OrbitControls, FirstPersonControls};
use model_converter::camera::Camera;
fn as_millis(duration: Duration) -> u64 {
duration.as_secs() * 1_000 + (duration.subsec_nanos() as u64) / 1_000_000
@ -62,31 +66,26 @@ struct CameraEvent {
}
fn main() {
let matches = App::new("3D Viewer")
.version("1.0")
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("FILES")
.takes_value(true)
.multiple(true)
.help("Input model files")
.required(true),
)
.arg(
Arg::with_name("first person")
.short("f")
.long("first-person")
.help("Uses first person controls instead of orbit controls"),
)
.arg(
Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.help("Shows logs during the parsing of the model"),
)
.arg(Arg::with_name("input")
.short("i")
.long("input")
.value_name("FILES")
.takes_value(true)
.multiple(true)
.help("Input model files")
.required(true))
.arg(Arg::with_name("first person")
.short("f")
.long("first-person")
.help("Uses first person controls instead of orbit controls"))
.arg(Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.help("Shows logs during the parsing of the model"))
.get_matches();
// Set verbose flag
@ -100,12 +99,16 @@ fn main() {
let mut path_count = 0;
let mut path = vec![];
use std::f64::{MAX, MIN};
let mut bbox = BoundingBox3::new(Vector3::new(MAX, MAX, MAX), Vector3::new(MIN, MIN, MIN));
use std::f64::{MIN, MAX};
let mut bbox = BoundingBox3::new(
Vector3::new(MAX, MAX, MAX),
Vector3::new(MIN, MIN, MIN),
);
let mut models = vec![];
for input in matches.values_of("input").unwrap() {
info!("Parsing model {}", input);
match parse_file(&input) {
@ -114,20 +117,21 @@ fn main() {
bbox = bbox.union(&model.bounding_box());
}
models.push((input.to_owned(), model))
}
},
Err(e) => {
error!("Error while parsing file: {}", e);
exit(1);
}
},
}
}
let event_loop = EventLoop::new();
let window = WindowBuilder::new().with_visible(false);
let mut events_loop = EventsLoop::new();
let window = WindowBuilder::new().with_visibility(false);
let context = glutin::ContextBuilder::new().with_depth_buffer(24);
let display = Display::new(window, context, &event_loop).unwrap();
let display = Display::new(window, context, &events_loop).unwrap();
let mut renderer = Renderer::new(display);
renderer.set_clear_color(1.0, 1.0, 1.0, 1.0);
let mut scene = Scene::new();
let mut before;
@ -160,21 +164,23 @@ fn main() {
let center_f64 = Vector3::new(center.x() as f64, center.y() as f64, center.z() as f64);
let size_f64 = size as f64;
let mut closed = false;
let mut camera = Camera::new(
Vector3::new(0.0, 0.0, 0.0),
Vector3::new(0.0, 0.0, 0.0),
Vector3::new(0.0, 1.0, 0.0),
Vector3::new( 0.0, 0.0, 0.0),
Vector3::new( 0.0, 0.0, 0.0),
Vector3::new( 0.0, 1.0, 0.0),
);
camera.z_near = 0.0001;
let mut controls: Box<dyn Controls> = if matches.is_present("first person") {
let mut controls: Box<Controls> = if matches.is_present("first person") {
Box::new(FirstPersonControls::new())
} else {
Box::new(OrbitControls::new(
Vector3::new(0.0, 0.0, 0.0),
1.0,
&mut camera,
&mut camera
))
};
@ -185,120 +191,86 @@ fn main() {
let mut recording = false;
let mut before = Instant::now();
event_loop.run(move |ev, _, control_flow| {
while !closed {
let mut should_screenshot = false;
controls.manage_event(&ev, &mut camera, &renderer);
match ev {
// No idea what this is
Event::NewEvents(cause) => match cause {
glium::glutin::event::StartCause::ResumeTimeReached { .. } => (),
glium::glutin::event::StartCause::Init => (),
_ => return,
},
events_loop.poll_events(|ev| {
// Close window
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => *control_flow = glutin::event_loop::ControlFlow::Exit,
controls.manage_event(&ev, &mut camera, &renderer);
// Escape key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Escape),
state: ElementState::Pressed,
..
},
..
},
..
} => *control_flow = glutin::event_loop::ControlFlow::Exit,
match ev {
// Close window
Event::WindowEvent {
event: WindowEvent::CloseRequested, ..
} => closed = true,
// R key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::R),
state: ElementState::Pressed,
..
},
..
},
..
} => {
if !recording {
path.clear();
recording = true;
} else {
recording = false;
let string = serde_json::to_string(&path).unwrap();
let mut file = File::create(format!("path-{}.json", path_count)).unwrap();
file.write_all(string.as_bytes()).unwrap();
path_count += 1;
}
// Escape key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Escape),
state: ElementState::Pressed, ..
}, ..
}, ..
} => closed = true,
// R key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::R),
state: ElementState::Pressed, ..
}, ..
}, ..
} => {
if ! recording {
path.clear();
recording = true;
} else {
recording = false;
let string = serde_json::to_string(&path).unwrap();
let mut file = File::create(format!("path-{}.json", path_count)).unwrap();
file.write_all(string.as_bytes()).unwrap();
path_count += 1;
}
},
// Enter key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Return),
state: ElementState::Pressed, ..
}, ..
}, ..
} => {
trace!("Camera:");
let world_position = camera.position * size_f64 + center_f64;
let world_target = camera.target * size_f64 + center_f64;
trace!("\tPosition: ({}, {}, {})",
world_position.x(), world_position.y(), world_position.z());
trace!("\tTarget: ({}, {}, {})",
world_target.x(), world_target.y(), world_target.z());
trace!("\tUp: ({}, {}, {})",
camera.up.x(), camera.up.y(), camera.up.z());
},
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::C),
state: ElementState::Pressed, ..
}, ..
}, ..
} => should_screenshot = true,
_ => (),
}
// Enter key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Return),
state: ElementState::Pressed,
..
},
..
},
..
} => {
trace!("Camera:");
let world_position = camera.position * size_f64 + center_f64;
let world_target = camera.target * size_f64 + center_f64;
trace!(
"\tPosition: ({}, {}, {})",
world_position.x(),
world_position.y(),
world_position.z()
);
trace!(
"\tTarget: ({}, {}, {})",
world_target.x(),
world_target.y(),
world_target.z()
);
trace!(
"\tUp: ({}, {}, {})",
camera.up.x(),
camera.up.y(),
camera.up.z()
);
}
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::C),
state: ElementState::Pressed,
..
},
..
},
..
} => should_screenshot = true,
_ => (),
}
});
controls.update(&mut camera, &renderer);
renderer.render(&scene, &camera);
@ -327,5 +299,5 @@ fn main() {
}
before = Instant::now();
});
}
}

View File

@ -1,24 +1,23 @@
//! This module contains the rendering structs.
use std::borrow::Cow;
use std::cell::Ref;
use std::ops::Deref;
use std::borrow::Cow;
use image;
use image::{DynamicImage, ImageBuffer, Rgba};
use image::{ImageBuffer, Rgba, DynamicImage};
use glium::draw_parameters::{Blend, DepthTest};
use glium::glutin::PossiblyCurrent as Pc;
use glium::index::{NoIndices, PrimitiveType};
use glium::program::ProgramCreationInput;
use glium::texture::{RawImage2d, SrgbTexture2d, Texture2dDataSink};
use glium::{Depth, Display, DrawParameters, Frame, Program, Surface, VertexBuffer};
use glium::{Frame, Display, Surface, Program, DrawParameters, Depth, VertexBuffer};
use glium::draw_parameters::{DepthTest, Blend};
use glium::index::{NoIndices, PrimitiveType};
use glium::glutin::GlWindow;
use glium::program::ProgramCreationInput;
use camera::{mat_to_f32, RenderCamera};
use scene::Scene;
use camera::{RenderCamera, mat_to_f32};
use model::{Vertex, Part, Model};
use math::vector::Vector3;
use model::{Model, Part, Vertex};
/// Image data stored as RGBA.
pub struct RgbaImageData {
@ -58,62 +57,61 @@ pub struct Renderer {
}
impl Renderer {
/// Creates the program with the default shader.
pub fn default_shader(display: &Display) -> Program {
Program::new(
display,
ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/default.vert"),
fragment_shader: include_str!("../assets/shaders/default.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
},
)
.unwrap()
Program::new(display, ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/default.vert"),
fragment_shader: include_str!("../assets/shaders/default.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
}).unwrap()
}
/// Creates the shader with one color per face.
pub fn color_shader(display: &Display) -> Program {
Program::new(
display,
ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/color.vert"),
fragment_shader: include_str!("../assets/shaders/color.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: true,
uses_point_size: false,
},
)
.unwrap()
Program::new(display, ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/color.vert"),
fragment_shader: include_str!("../assets/shaders/color.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: true,
uses_point_size: false,
}).unwrap()
}
/// Creates a new renderer from a display.
///
/// It uses the default shaders and creates an empty vec of models.
pub fn new(display: Display) -> Renderer {
let program = Renderer::default_shader(&display);
Renderer::from_display_and_program(display, program)
}
/// Creates a new colored renderer from a display.
///
/// It uses the face color shaders and creates an empty vec of models.
pub fn color(display: Display) -> Renderer {
let program = Renderer::color_shader(&display);
Renderer::from_display_and_program(display, program)
}
/// Creates a new renderer from a program.
///
/// It allows you to use a custom shader.
pub fn from_display_and_program(display: Display, program: Program) -> Renderer {
let image = RawImage2d::from_raw_rgba(vec![1.0, 1.0, 1.0, 1.0], (1, 1));
let texture = SrgbTexture2d::new(&display, image).ok().unwrap();
@ -128,17 +126,18 @@ impl Renderer {
renderer.capture();
renderer
}
/// Returns the inner GlWindow.
pub fn gl_window(&self) -> Ref<'_, impl Deref<Target = glium::glutin::WindowedContext<Pc>>> {
pub fn gl_window(&self) -> Ref<GlWindow> {
self.display.gl_window()
}
/// Creates a SrgbTexture from a path to an image.
pub fn make_texture(&self, path: &str) -> SrgbTexture2d {
let image = match image::open(path) {
Ok(r) => r.to_rgba8(),
Ok(r) => r.to_rgba(),
Err(e) => panic!("Error while opening file {}: {}", path, e),
};
@ -146,23 +145,14 @@ impl Renderer {
}
/// Creates a SrgbTexture from an image buffer.
pub fn make_texture_from_buffer(
&self,
buffer: ImageBuffer<Rgba<u8>, Vec<u8>>,
) -> SrgbTexture2d {
pub fn make_texture_from_buffer(&self, buffer: ImageBuffer<Rgba<u8>, Vec<u8>>) -> SrgbTexture2d {
let dimensions = buffer.dimensions();
let buffer = RawImage2d::from_raw_rgba_reversed(&buffer.into_raw(), dimensions);
SrgbTexture2d::new(&self.display, buffer).ok().unwrap()
}
/// Creates a 1x1 SrgbTexture with the color passed as parameter.
pub fn make_texture_from_color_channels(
&self,
r: f32,
g: f32,
b: f32,
a: f32,
) -> SrgbTexture2d {
pub fn make_texture_from_color_channels(&self, r: f32, g: f32, b: f32, a: f32) -> SrgbTexture2d {
let image = RawImage2d::from_raw_rgba(vec![r, g, b, a], (1, 1));
SrgbTexture2d::new(&self.display, image).ok().unwrap()
}
@ -180,6 +170,7 @@ impl Renderer {
/// Renders on the display.
pub fn render<C: RenderCamera>(&self, scene: &Scene, camera: &C) {
let mut target = self.draw();
target.clear_color_srgb_and_depth(self.clear_color, 1.0);
@ -190,16 +181,18 @@ impl Renderer {
depth: Depth {
test: DepthTest::IfLess,
write: true,
..Default::default()
.. Default::default()
},
blend: Blend::alpha_blending(),
..Default::default()
.. Default::default()
};
for model in scene.iter() {
let model = &*model.borrow();
for part in &model.parts {
if let &Some(ref buffer) = part.vertex_buffer() {
let diffuse = if let Some(ref name) = part.material_name {
if let None = model.materials.get(name) {
panic!("Material {} not found", name);
@ -213,29 +206,24 @@ impl Renderer {
let texture = self.get_texture_of_part(&model, part);
let (texture, size) = if let Some((texture, size)) = texture {
(
texture,
Vector3::new(size[0] as f32, size[1] as f32, size[2] as f32),
)
(texture, Vector3::new(size[0] as f32, size[1] as f32, size[2] as f32))
} else {
(&self.default_texture, Vector3::new(1.0, 1.0, 1.0))
};
target
.draw(
buffer,
NoIndices(PrimitiveType::TrianglesList),
&self.program,
&uniform!(
diffuse: Into::<[f32; 3]>::into(diffuse),
tex: texture,
perspective: Into::<[[f32; 4]; 4]>::into(perspective),
view: Into::<[[f32; 4]; 4]>::into(view),
texture_size: Into::<[f32; 3]>::into(size),
),
target.draw(
buffer,
NoIndices(PrimitiveType::TrianglesList),
&self.program,
&uniform!(
diffuse: Into::<[f32; 3]>::into(diffuse),
tex: texture,
perspective: Into::<[[f32; 4]; 4]>::into(perspective),
view: Into::<[[f32; 4]; 4]>::into(view),
texture_size: Into::<[f32; 3]>::into(size),
),
&params,
)
.unwrap();
).unwrap();
}
}
}
@ -244,11 +232,7 @@ impl Renderer {
}
/// Renders a part of a model.
fn get_texture_of_part<'a>(
&self,
model: &'a Model,
part: &Part,
) -> Option<(&'a SrgbTexture2d, Vector3<f64>)> {
fn get_texture_of_part<'a>(&self, model: &'a Model, part: &Part) -> Option<(&'a SrgbTexture2d, Vector3<f64>)> {
if let Some(ref material_name) = part.material_name {
if let Some(ref material) = model.materials.get(material_name) {
if let Some((texture, size)) = material.textures.get("map_Kd") {
@ -280,21 +264,21 @@ impl Renderer {
/// Shows the window if hidden.
pub fn show(&mut self) {
self.gl_window().window().set_visible(true);
self.gl_window().show();
}
/// Returns a DynamicImage of the corresponding frame.
pub fn capture(&self) -> DynamicImage {
// Create temporary texture and blit the front buffer to it
let image: RawImage2d<u8> = self.display.read_front_buffer().unwrap();
let image =
ImageBuffer::from_raw(image.width, image.height, image.data.into_owned()).unwrap();
let image: RawImage2d<u8> = self.display.read_front_buffer();
let image = ImageBuffer::from_raw(image.width, image.height, image.data.into_owned()).unwrap();
DynamicImage::ImageRgba8(image).flipv()
}
}
/// Converts an RgbaImageData to a DynamicImage.
pub fn rgba_image_data_to_image(image_data: RgbaImageData) -> image::DynamicImage {
let pixels = {
let mut v = Vec::with_capacity(image_data.data.len() * 4);
for (a, b, c, d) in image_data.data {
@ -306,12 +290,15 @@ pub fn rgba_image_data_to_image(image_data: RgbaImageData) -> image::DynamicImag
v
};
// Create ImageBuffer
let image_buffer =
image::ImageBuffer::from_raw(image_data.width, image_data.height, pixels).unwrap();
image::ImageBuffer::from_raw(image_data.width, image_data.height, pixels)
.unwrap();
// Save the screenshot to file
let image = image::DynamicImage::ImageRgba8(image_buffer).flipv();
image
}