Update glium

Thomas Forgione 2022-08-14 14:25:17 +02:00
parent 43d621bc1d
commit 238f523379
4 changed files with 353 additions and 287 deletions

View File

@@ -7,7 +7,7 @@ authors = ["Thomas Forgione <thomas@tforgione.fr>"]
log = "0.4"
stderrlog = "0.4.1"
num = "0.1.42"
glium = "0.22.0"
glium = "0.32.1"
image = "0.23"
byteorder = "1.2.3"
clap = "2.31.2"
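Note: the jump from glium 0.22 to 0.32 also swaps in a much newer bundled glutin/winit, which is what drives the import and API changes in the files below. As a rough summary, the relevant types now live in submodules, sketched here with the same paths the rest of this commit imports:

// Sketch: import paths as re-exported by glium 0.32's bundled glutin.
use glium::glutin::dpi::{PhysicalPosition, PhysicalSize};
use glium::glutin::event::{ElementState, Event, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;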

View File

@@ -2,30 +2,20 @@
const EPSILON: f64 = 0.001;
use glium::glutin::{
Event,
WindowEvent,
ElementState,
MouseButton,
MouseScrollDelta,
KeyboardInput,
VirtualKeyCode,
};
use glium::glutin::dpi::{
LogicalSize,
LogicalPosition,
use glium::glutin::dpi::{PhysicalPosition, PhysicalSize};
use glium::glutin::event::{
ElementState, Event, KeyboardInput, MouseButton, MouseScrollDelta, VirtualKeyCode, WindowEvent,
};
use math::vector::{Vector2, Vector3};
use camera::Camera;
use renderer::Renderer;
use math::vector::{Vector2, Vector3};
use model::Model;
use renderer::Renderer;
/// The trait that all controls should implement.
pub trait Controls {
/// Modifies the camera depending on the event.
fn manage_event(&mut self, event: &Event, camera: &mut Camera, renderer: &Renderer);
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, renderer: &Renderer);
/// Updates the camera depending on time.
fn update(&mut self, camera: &mut Camera, renderer: &Renderer);
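Migration note: winit's Event type is now generic over a user-event type, which is why the trait methods take &Event<()> (no custom events are ever posted to the loop). A minimal, hypothetical handler with the same shape, just to illustrate the signature (log_focus_changes is illustrative, not part of this crate):

use glium::glutin::event::{Event, WindowEvent};

// Sketch: with () as the user-event parameter, only built-in window/device events arrive.
fn log_focus_changes(event: &Event<()>) {
    if let Event::WindowEvent { event: WindowEvent::Focused(focused), .. } = event {
        println!("window focused: {}", focused);
    }
}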
@@ -58,7 +48,6 @@ pub struct OrbitControls {
}
impl OrbitControls {
/// Creates a new orbit controls, and initializes the camera.
pub fn new(center: Vector3<f64>, distance: f64, camera: &mut Camera) -> OrbitControls {
let controls = OrbitControls {
@@ -90,35 +79,40 @@ impl OrbitControls {
OrbitControls::new(
Vector3::new(center.x() as f64, center.y() as f64, center.z() as f64),
distance as f64,
camera
camera,
)
}
}
impl Controls for OrbitControls {
fn manage_event(&mut self, event: &Event, camera: &mut Camera, _: &Renderer) {
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, _: &Renderer) {
match *event {
Event::WindowEvent {
event: WindowEvent::MouseInput {
button: MouseButton::Left,
state, ..
}, ..
event:
WindowEvent::MouseInput {
button: MouseButton::Left,
state,
..
},
..
} => {
self.pressed = state == ElementState::Pressed;
},
}
Event::WindowEvent {
event: WindowEvent::Resized(LogicalSize { width, height }), ..
event: WindowEvent::Resized(PhysicalSize { width, height }),
..
} => {
camera.aspect_ratio = width as f64 / height as f64;
},
}
Event::WindowEvent {
event: WindowEvent::MouseWheel {
delta: MouseScrollDelta::LineDelta(_, y), ..
}, ..
event:
WindowEvent::MouseWheel {
delta: MouseScrollDelta::LineDelta(_, y),
..
},
..
} => {
self.distance -= y as f64 / self.sensitivity;
@@ -128,12 +122,15 @@ impl Controls for OrbitControls {
camera.position += self.center;
camera.target = self.center;
},
}
Event::WindowEvent{
event: WindowEvent::CursorMoved {
position: LogicalPosition { x, y }, ..
}, ..
Event::WindowEvent {
event:
WindowEvent::CursorMoved {
position: PhysicalPosition { x, y },
..
},
..
} => {
let current_position = Vector2::new(x as f64, y as f64);
@@ -141,11 +138,11 @@ impl Controls for OrbitControls {
let difference = (current_position - self.mouse_position) / self.sensitivity;
self.theta += difference.x();
self.phi += difference.y();
self.phi += difference.y();
use std::f64::consts::PI;
self.phi = self.phi.max(- PI/2.0 + EPSILON);
self.phi = self.phi.min( PI/2.0 - EPSILON);
self.phi = self.phi.max(-PI / 2.0 + EPSILON);
self.phi = self.phi.min(PI / 2.0 - EPSILON);
*camera.position.x_mut() = self.distance * self.phi.cos() * self.theta.cos();
*camera.position.y_mut() = self.distance * self.phi.sin();
@@ -153,25 +150,21 @@ impl Controls for OrbitControls {
camera.position += self.center;
camera.target = self.center;
}
// Record new position
self.mouse_position = current_position;
},
}
_ => (),
}
}
fn update(&mut self, _: &mut Camera, _: &Renderer) {
}
fn update(&mut self, _: &mut Camera, _: &Renderer) {}
}
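Migration note: WindowEvent::Resized now carries a PhysicalSize and CursorMoved a PhysicalPosition (real pixels), replacing the LogicalSize/LogicalPosition used with glium 0.22, so the controls above read physical coordinates directly. If DPI-scaled logical coordinates were ever needed again, the conversion goes through the window's scale factor; a small sketch under that assumption (to_logical_coords is an illustrative name):

use glium::glutin::dpi::{LogicalPosition, PhysicalPosition};
use glium::glutin::window::Window;

// Sketch: turn a physical (pixel) cursor position back into logical, DPI-scaled units.
fn to_logical_coords(window: &Window, position: PhysicalPosition<f64>) -> LogicalPosition<f64> {
    position.to_logical(window.scale_factor())
}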
/// First person controls, just like in video games.
pub struct FirstPersonControls {
/// Theta angle of the spheric coordinates of the direction of the camera.
theta: f64,
@@ -236,90 +229,121 @@ impl FirstPersonControls {
}
impl Controls for FirstPersonControls {
fn manage_event(&mut self, event: &Event, camera: &mut Camera, renderer: &Renderer) {
fn manage_event(&mut self, event: &Event<()>, camera: &mut Camera, renderer: &Renderer) {
match *event {
// On resize window
Event::WindowEvent {
event: WindowEvent::Resized(LogicalSize { width, height } ), ..
event: WindowEvent::Resized(PhysicalSize { width, height }),
..
} => {
camera.aspect_ratio = width as f64 / height as f64;
},
}
// On Z pressed
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Z), state, ..
}, ..
}, ..
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Z),
state,
..
},
..
},
..
} => {
self.forward_pressed = state == ElementState::Pressed;
},
}
// On S pressed
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::S), state, ..
}, ..
}, ..
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::S),
state,
..
},
..
},
..
} => {
self.backward_pressed = state == ElementState::Pressed;
},
}
// On Q pressed
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Q), state, ..
}, ..
}, ..
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Q),
state,
..
},
..
},
..
} => {
self.left_pressed = state == ElementState::Pressed;
},
}
// On D pressed
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::D), state, ..
}, ..
}, ..
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::D),
state,
..
},
..
},
..
} => {
self.right_pressed = state == ElementState::Pressed;
},
}
// On Space pressed
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Space), state, ..
}, ..
}, ..
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Space),
state,
..
},
..
},
..
} => {
self.boost = state == ElementState::Pressed;
},
}
// On mouse move
Event::WindowEvent {
event: WindowEvent::CursorMoved {
position: LogicalPosition { x, y }, ..
}, ..
event:
WindowEvent::CursorMoved {
position: PhysicalPosition { x, y },
..
},
..
} => {
let size = renderer.gl_window().window().get_inner_size().unwrap();
let size = renderer.gl_window().window().inner_size();
let center = Vector2::new(size.width as f64 / 2.0, size.height as f64 / 2.0);
let current_position = Vector2::new(x as f64, y as f64);
let difference = (current_position - center) / self.sensitivity;
self.theta += difference.x();
self.phi -= difference.y();
self.phi -= difference.y();
use std::f64::consts::PI;
self.phi = self.phi.max(- PI/2.0 + EPSILON);
self.phi = self.phi.min( PI/2.0 - EPSILON);
self.phi = self.phi.max(-PI / 2.0 + EPSILON);
self.phi = self.phi.min(PI / 2.0 - EPSILON);
self.forward = Vector3::new(
self.phi.cos() * self.theta.cos(),
@@ -327,29 +351,29 @@ impl Controls for FirstPersonControls {
self.phi.cos() * self.theta.sin(),
);
self.left = Vector3::new(0.0, 1.0, 0.0).cross_product(self.forward).normalized();
self.left = Vector3::new(0.0, 1.0, 0.0)
.cross_product(self.forward)
.normalized();
// Move the cursor back to the center
renderer
.gl_window()
.window()
.set_cursor_position(LogicalPosition::new(
size.width / 2.0, size.height / 2.0))
.set_cursor_position(PhysicalPosition::new(
size.width as f64 / 2.0,
size.height as f64 / 2.0,
))
.unwrap();
},
}
_ => (),
}
self.update_camera(camera);
}
fn update(&mut self, camera: &mut Camera, renderer: &Renderer) {
renderer.gl_window().hide_cursor(true);
renderer.gl_window().window().set_cursor_visible(false);
let mut speed = Vector3::new(0.0, 0.0, 0.0);
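Migration note: get_inner_size() and hide_cursor(true) are gone; the window now exposes inner_size() (a PhysicalSize<u32>, no Option) and set_cursor_visible(false), and set_cursor_position takes a PhysicalPosition. A sketch of the cursor-recentering done above, written against a bare winit Window rather than the Renderer (recenter_cursor is illustrative):

use glium::glutin::dpi::PhysicalPosition;
use glium::glutin::window::Window;

// Sketch: hide the cursor and warp it back to the center of the window.
fn recenter_cursor(window: &Window) {
    window.set_cursor_visible(false);
    let size = window.inner_size(); // PhysicalSize<u32>, no longer an Option
    let center = PhysicalPosition::new(size.width as f64 / 2.0, size.height as f64 / 2.0);
    // set_cursor_position can fail on some platforms (e.g. Wayland), hence the Result.
    let _ = window.set_cursor_position(center);
}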

View File

@@ -6,37 +6,33 @@ extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate stderrlog;
extern crate clap;
extern crate glium;
extern crate model_converter;
extern crate stderrlog;
use std::fs::File;
use std::io::Write;
use std::process::exit;
use std::time::{Instant, Duration};
use std::thread::sleep;
use std::time::{Duration, Instant};
use clap::{App, Arg};
use glium::Display;
use glium::glutin;
use glium::glutin::{EventsLoop, WindowBuilder};
use glium::glutin::event_loop::EventLoop;
use glium::glutin::window::WindowBuilder;
use glium::Display;
use glium::glutin::{
Event,
WindowEvent,
VirtualKeyCode,
ElementState,
};
use glium::glutin::event::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};
use model_converter::scene::Scene;
use model_converter::camera::Camera;
use model_converter::controls::{FirstPersonControls, OrbitControls};
use model_converter::math::bounding_box::BoundingBox3;
use model_converter::math::vector::Vector3;
use model_converter::parser::parse_file;
use model_converter::renderer::Renderer;
use model_converter::controls::{OrbitControls, FirstPersonControls};
use model_converter::camera::Camera;
use model_converter::scene::Scene;
fn as_millis(duration: Duration) -> u64 {
duration.as_secs() * 1_000 + (duration.subsec_nanos() as u64) / 1_000_000
@@ -66,26 +62,31 @@ struct CameraEvent {
}
fn main() {
let matches = App::new("3D Viewer")
.version("1.0")
.arg(Arg::with_name("input")
.short("i")
.long("input")
.value_name("FILES")
.takes_value(true)
.multiple(true)
.help("Input model files")
.required(true))
.arg(Arg::with_name("first person")
.short("f")
.long("first-person")
.help("Uses first person controls instead of orbit controls"))
.arg(Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.help("Shows logs during the parsing of the model"))
.arg(
Arg::with_name("input")
.short("i")
.long("input")
.value_name("FILES")
.takes_value(true)
.multiple(true)
.help("Input model files")
.required(true),
)
.arg(
Arg::with_name("first person")
.short("f")
.long("first-person")
.help("Uses first person controls instead of orbit controls"),
)
.arg(
Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.help("Shows logs during the parsing of the model"),
)
.get_matches();
// Set verbose flag
@@ -99,16 +100,12 @@ fn main() {
let mut path_count = 0;
let mut path = vec![];
use std::f64::{MIN, MAX};
let mut bbox = BoundingBox3::new(
Vector3::new(MAX, MAX, MAX),
Vector3::new(MIN, MIN, MIN),
);
use std::f64::{MAX, MIN};
let mut bbox = BoundingBox3::new(Vector3::new(MAX, MAX, MAX), Vector3::new(MIN, MIN, MIN));
let mut models = vec![];
for input in matches.values_of("input").unwrap() {
info!("Parsing model {}", input);
match parse_file(&input) {
@@ -117,19 +114,19 @@ fn main() {
bbox = bbox.union(&model.bounding_box());
}
models.push((input.to_owned(), model))
},
}
Err(e) => {
error!("Error while parsing file: {}", e);
exit(1);
},
}
}
}
let mut events_loop = EventsLoop::new();
let window = WindowBuilder::new().with_visibility(false);
let event_loop = EventLoop::new();
let window = WindowBuilder::new().with_visible(false);
let context = glutin::ContextBuilder::new().with_depth_buffer(24);
let display = Display::new(window, context, &events_loop).unwrap();
let display = Display::new(window, context, &event_loop).unwrap();
let mut renderer = Renderer::new(display);
let mut scene = Scene::new();
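Migration note: EventsLoop is now EventLoop (under glutin::event_loop) and with_visibility(false) became with_visible(false); Display::new still takes the two builders plus a reference to the loop. The setup above, reduced to a minimal standalone sketch (create_display is an illustrative name):

use glium::glutin::event_loop::EventLoop;
use glium::glutin::window::WindowBuilder;
use glium::{glutin, Display};

// Sketch: window/context creation against glium 0.32's glutin backend.
fn create_display() -> (EventLoop<()>, Display) {
    let event_loop = EventLoop::new();
    let window = WindowBuilder::new().with_visible(false);
    let context = glutin::ContextBuilder::new().with_depth_buffer(24);
    let display = Display::new(window, context, &event_loop).unwrap();
    (event_loop, display)
}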
@@ -163,12 +160,10 @@ fn main() {
let center_f64 = Vector3::new(center.x() as f64, center.y() as f64, center.z() as f64);
let size_f64 = size as f64;
let mut closed = false;
let mut camera = Camera::new(
Vector3::new( 0.0, 0.0, 0.0),
Vector3::new( 0.0, 0.0, 0.0),
Vector3::new( 0.0, 1.0, 0.0),
Vector3::new(0.0, 0.0, 0.0),
Vector3::new(0.0, 0.0, 0.0),
Vector3::new(0.0, 1.0, 0.0),
);
camera.z_near = 0.0001;
@@ -179,7 +174,7 @@ fn main() {
Box::new(OrbitControls::new(
Vector3::new(0.0, 0.0, 0.0),
1.0,
&mut camera
&mut camera,
))
};
@@ -190,86 +185,120 @@ fn main() {
let mut recording = false;
let mut before = Instant::now();
while !closed {
event_loop.run(move |ev, _, control_flow| {
let mut should_screenshot = false;
controls.manage_event(&ev, &mut camera, &renderer);
events_loop.poll_events(|ev| {
match ev {
// No idea what this is
Event::NewEvents(cause) => match cause {
glium::glutin::event::StartCause::ResumeTimeReached { .. } => (),
glium::glutin::event::StartCause::Init => (),
_ => return,
},
controls.manage_event(&ev, &mut camera, &renderer);
// Close window
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => *control_flow = glutin::event_loop::ControlFlow::Exit,
match ev {
// Close window
Event::WindowEvent {
event: WindowEvent::CloseRequested, ..
} => closed = true,
// Escape key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Escape),
state: ElementState::Pressed,
..
},
..
},
..
} => *control_flow = glutin::event_loop::ControlFlow::Exit,
// Escape key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Escape),
state: ElementState::Pressed, ..
}, ..
}, ..
} => closed = true,
// R key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::R),
state: ElementState::Pressed, ..
}, ..
}, ..
} => {
if ! recording {
path.clear();
recording = true;
} else {
recording = false;
let string = serde_json::to_string(&path).unwrap();
let mut file = File::create(format!("path-{}.json", path_count)).unwrap();
file.write_all(string.as_bytes()).unwrap();
path_count += 1;
}
},
// Enter key
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Return),
state: ElementState::Pressed, ..
}, ..
}, ..
} => {
trace!("Camera:");
let world_position = camera.position * size_f64 + center_f64;
let world_target = camera.target * size_f64 + center_f64;
trace!("\tPosition: ({}, {}, {})",
world_position.x(), world_position.y(), world_position.z());
trace!("\tTarget: ({}, {}, {})",
world_target.x(), world_target.y(), world_target.z());
trace!("\tUp: ({}, {}, {})",
camera.up.x(), camera.up.y(), camera.up.z());
},
Event::WindowEvent {
event: WindowEvent::KeyboardInput {
input: glutin::KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::C),
state: ElementState::Pressed, ..
}, ..
}, ..
} => should_screenshot = true,
_ => (),
// R key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::R),
state: ElementState::Pressed,
..
},
..
},
..
} => {
if !recording {
path.clear();
recording = true;
} else {
recording = false;
let string = serde_json::to_string(&path).unwrap();
let mut file = File::create(format!("path-{}.json", path_count)).unwrap();
file.write_all(string.as_bytes()).unwrap();
path_count += 1;
}
}
});
// Enter key
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::Return),
state: ElementState::Pressed,
..
},
..
},
..
} => {
trace!("Camera:");
let world_position = camera.position * size_f64 + center_f64;
let world_target = camera.target * size_f64 + center_f64;
trace!(
"\tPosition: ({}, {}, {})",
world_position.x(),
world_position.y(),
world_position.z()
);
trace!(
"\tTarget: ({}, {}, {})",
world_target.x(),
world_target.y(),
world_target.z()
);
trace!(
"\tUp: ({}, {}, {})",
camera.up.x(),
camera.up.y(),
camera.up.z()
);
}
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
virtual_keycode: Some(VirtualKeyCode::C),
state: ElementState::Pressed,
..
},
..
},
..
} => should_screenshot = true,
_ => (),
}
controls.update(&mut camera, &renderer);
renderer.render(&scene, &camera);
@@ -298,5 +327,5 @@ fn main() {
}
before = Instant::now();
}
});
}
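Migration note: the old while !closed { events_loop.poll_events(...) } pattern does not survive the update; EventLoop::run takes a move closure, never returns, and is driven through ControlFlow (Exit replaces the closed flag, and the NewEvents/StartCause arm above skips wake-ups that are neither Init nor ResumeTimeReached). A stripped-down sketch of that control flow, with the actual drawing replaced by a placeholder (render_frame is hypothetical):

use std::time::{Duration, Instant};

use glium::glutin::event::{Event, StartCause, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};

// Sketch: fixed-rate redraw loop in the winit 0.27 style used by glium 0.32.
fn run(event_loop: EventLoop<()>) -> ! {
    event_loop.run(move |event, _, control_flow| {
        match event {
            // Redraw only when the timer fires or on the very first iteration.
            Event::NewEvents(StartCause::ResumeTimeReached { .. })
            | Event::NewEvents(StartCause::Init) => {
                render_frame(); // hypothetical stand-in for the rendering done in main()
            }
            Event::WindowEvent { event: WindowEvent::CloseRequested, .. } => {
                *control_flow = ControlFlow::Exit;
                return;
            }
            _ => return,
        }
        // Ask for the next wake-up roughly 16 ms from now (about 60 fps).
        *control_flow = ControlFlow::WaitUntil(Instant::now() + Duration::from_millis(16));
    })
}

fn render_frame() {
    // Placeholder: the real loop updates the controls and calls renderer.render(&scene, &camera).
}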

View File

@@ -1,23 +1,24 @@
//! This module contains the rendering structs.
use std::cell::Ref;
use std::borrow::Cow;
use std::cell::Ref;
use std::ops::Deref;
use image;
use image::{ImageBuffer, Rgba, DynamicImage};
use image::{DynamicImage, ImageBuffer, Rgba};
use glium::texture::{RawImage2d, SrgbTexture2d, Texture2dDataSink};
use glium::{Frame, Display, Surface, Program, DrawParameters, Depth, VertexBuffer};
use glium::draw_parameters::{DepthTest, Blend};
use glium::draw_parameters::{Blend, DepthTest};
use glium::glutin::{ContextCurrentState, PossiblyCurrent as Pc};
use glium::index::{NoIndices, PrimitiveType};
use glium::glutin::GlWindow;
use glium::program::ProgramCreationInput;
use glium::texture::{RawImage2d, SrgbTexture2d, Texture2dDataSink};
use glium::{Depth, Display, DrawParameters, Frame, Program, Surface, VertexBuffer};
use camera::{mat_to_f32, RenderCamera};
use scene::Scene;
use camera::{RenderCamera, mat_to_f32};
use model::{Vertex, Part, Model};
use math::vector::Vector3;
use model::{Model, Part, Vertex};
/// Image data stored as RGBA.
pub struct RgbaImageData {
@@ -57,61 +58,62 @@ pub struct Renderer {
}
impl Renderer {
/// Creates the program with the default shader.
pub fn default_shader(display: &Display) -> Program {
Program::new(display, ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/default.vert"),
fragment_shader: include_str!("../assets/shaders/default.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
}).unwrap()
Program::new(
display,
ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/default.vert"),
fragment_shader: include_str!("../assets/shaders/default.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: false,
uses_point_size: false,
},
)
.unwrap()
}
/// Creates the shader with one color per face.
pub fn color_shader(display: &Display) -> Program {
Program::new(display, ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/color.vert"),
fragment_shader: include_str!("../assets/shaders/color.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: true,
uses_point_size: false,
}).unwrap()
Program::new(
display,
ProgramCreationInput::SourceCode {
vertex_shader: include_str!("../assets/shaders/color.vert"),
fragment_shader: include_str!("../assets/shaders/color.frag"),
geometry_shader: None,
tessellation_control_shader: None,
tessellation_evaluation_shader: None,
transform_feedback_varyings: None,
outputs_srgb: true,
uses_point_size: false,
},
)
.unwrap()
}
/// Creates a new renderer from a display.
///
/// It uses the default shaders and creates an empty vec of models.
pub fn new(display: Display) -> Renderer {
let program = Renderer::default_shader(&display);
Renderer::from_display_and_program(display, program)
}
/// Creates a new colored renderer from a display.
///
/// It uses the face colors shaders and creates an empty vec of models.
pub fn color(display: Display) -> Renderer {
let program = Renderer::color_shader(&display);
Renderer::from_display_and_program(display, program)
}
/// Creates a new renderer from a program.
///
/// It allows you to use a custom shader.
pub fn from_display_and_program(display: Display, program: Program) -> Renderer {
let image = RawImage2d::from_raw_rgba(vec![1.0, 1.0, 1.0, 1.0], (1, 1));
let texture = SrgbTexture2d::new(&display, image).ok().unwrap();
@@ -126,11 +128,10 @@ impl Renderer {
renderer.capture();
renderer
}
/// Returns the inner GlWindows.
pub fn gl_window(&self) -> Ref<GlWindow> {
pub fn gl_window(&self) -> Ref<'_, impl Deref<Target = glium::glutin::WindowedContext<Pc>>> {
self.display.gl_window()
}
@@ -145,14 +146,23 @@ impl Renderer {
}
/// Creates a SrgbTexture from an image buffer.
pub fn make_texture_from_buffer(&self, buffer: ImageBuffer<Rgba<u8>, Vec<u8>>) -> SrgbTexture2d {
pub fn make_texture_from_buffer(
&self,
buffer: ImageBuffer<Rgba<u8>, Vec<u8>>,
) -> SrgbTexture2d {
let dimensions = buffer.dimensions();
let buffer = RawImage2d::from_raw_rgba_reversed(&buffer.into_raw(), dimensions);
SrgbTexture2d::new(&self.display, buffer).ok().unwrap()
}
/// Creates a 1x1 SrgbTexture with the color passed as parameter.
pub fn make_texture_from_color_channels(&self, r: f32, g: f32, b: f32, a: f32) -> SrgbTexture2d {
pub fn make_texture_from_color_channels(
&self,
r: f32,
g: f32,
b: f32,
a: f32,
) -> SrgbTexture2d {
let image = RawImage2d::from_raw_rgba(vec![r, g, b, a], (1, 1));
SrgbTexture2d::new(&self.display, image).ok().unwrap()
}
@@ -170,7 +180,6 @@ impl Renderer {
/// Renders on the display.
pub fn render<C: RenderCamera>(&self, scene: &Scene, camera: &C) {
let mut target = self.draw();
target.clear_color_srgb_and_depth(self.clear_color, 1.0);
@@ -181,18 +190,16 @@ impl Renderer {
depth: Depth {
test: DepthTest::IfLess,
write: true,
.. Default::default()
..Default::default()
},
blend: Blend::alpha_blending(),
.. Default::default()
..Default::default()
};
for model in scene.iter() {
let model = &*model.borrow();
for part in &model.parts {
if let &Some(ref buffer) = part.vertex_buffer() {
let diffuse = if let Some(ref name) = part.material_name {
if let None = model.materials.get(name) {
panic!("Material {} not found", name);
@@ -206,24 +213,29 @@ impl Renderer {
let texture = self.get_texture_of_part(&model, part);
let (texture, size) = if let Some((texture, size)) = texture {
(texture, Vector3::new(size[0] as f32, size[1] as f32, size[2] as f32))
(
texture,
Vector3::new(size[0] as f32, size[1] as f32, size[2] as f32),
)
} else {
(&self.default_texture, Vector3::new(1.0, 1.0, 1.0))
};
target.draw(
buffer,
NoIndices(PrimitiveType::TrianglesList),
&self.program,
&uniform!(
diffuse: Into::<[f32; 3]>::into(diffuse),
tex: texture,
perspective: Into::<[[f32; 4]; 4]>::into(perspective),
view: Into::<[[f32; 4]; 4]>::into(view),
texture_size: Into::<[f32; 3]>::into(size),
),
target
.draw(
buffer,
NoIndices(PrimitiveType::TrianglesList),
&self.program,
&uniform!(
diffuse: Into::<[f32; 3]>::into(diffuse),
tex: texture,
perspective: Into::<[[f32; 4]; 4]>::into(perspective),
view: Into::<[[f32; 4]; 4]>::into(view),
texture_size: Into::<[f32; 3]>::into(size),
),
&params,
).unwrap();
)
.unwrap();
}
}
}
@@ -232,7 +244,11 @@ impl Renderer {
}
/// Returns the texture of a part of a model, if it has one.
fn get_texture_of_part<'a>(&self, model: &'a Model, part: &Part) -> Option<(&'a SrgbTexture2d, Vector3<f64>)> {
fn get_texture_of_part<'a>(
&self,
model: &'a Model,
part: &Part,
) -> Option<(&'a SrgbTexture2d, Vector3<f64>)> {
if let Some(ref material_name) = part.material_name {
if let Some(ref material) = model.materials.get(material_name) {
if let Some((texture, size)) = material.textures.get("map_Kd") {
@@ -264,21 +280,21 @@ impl Renderer {
/// Shows the window if hidden.
pub fn show(&mut self) {
self.gl_window().show();
self.gl_window().window().set_visible(true);
}
/// Returns a DynamicImage of the corresponding frame.
pub fn capture(&self) -> DynamicImage {
// Create temporary texture and blit the front buffer to it
let image: RawImage2d<u8> = self.display.read_front_buffer();
let image = ImageBuffer::from_raw(image.width, image.height, image.data.into_owned()).unwrap();
let image: RawImage2d<u8> = self.display.read_front_buffer().unwrap();
let image =
ImageBuffer::from_raw(image.width, image.height, image.data.into_owned()).unwrap();
DynamicImage::ImageRgba8(image).flipv()
}
}
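Migration note: in glium 0.32, read_front_buffer returns a Result (hence the unwrap added above), gl_window() hands back a Ref to a WindowedContext, and showing the window goes through window().set_visible(true). A small standalone sketch of the capture logic under those assumptions (screenshot is an illustrative name):

use glium::texture::RawImage2d;
use glium::Display;
use image::{DynamicImage, ImageBuffer};

// Sketch: read the front buffer and flip it vertically, as capture() does above.
fn screenshot(display: &Display) -> DynamicImage {
    // read_front_buffer now returns Result<_, ReadError>, so the error has to be handled.
    let raw: RawImage2d<u8> = display
        .read_front_buffer()
        .expect("could not read the front buffer");
    let buffer = ImageBuffer::from_raw(raw.width, raw.height, raw.data.into_owned())
        .expect("front buffer dimensions did not match the pixel data");
    DynamicImage::ImageRgba8(buffer).flipv()
}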
/// Converts a RgbaImageData to a DynamicImage.
pub fn rgba_image_data_to_image(image_data: RgbaImageData) -> image::DynamicImage {
let pixels = {
let mut v = Vec::with_capacity(image_data.data.len() * 4);
for (a, b, c, d) in image_data.data {
@@ -290,15 +306,12 @@ pub fn rgba_image_data_to_image(image_data: RgbaImageData) -> image::DynamicImag
v
};
// Create ImageBuffer
let image_buffer =
image::ImageBuffer::from_raw(image_data.width, image_data.height, pixels)
.unwrap();
image::ImageBuffer::from_raw(image_data.width, image_data.height, pixels).unwrap();
// Save the screenshot to file
let image = image::DynamicImage::ImageRgba8(image_buffer).flipv();
image
}