[rs] Demonstrate obj loading in the skybox example

Dzmitry Malyshau 2021-02-04 18:10:38 -05:00
parent 218556c953
commit 710901b1aa
10 changed files with 6914 additions and 63 deletions

.gitattributes (new file)

@ -0,0 +1,2 @@
*.mtl binary
*.obj binary


@ -56,9 +56,10 @@ ddsfile = "0.4"
log = "0.4"
naga = { git = "https://github.com/gfx-rs/naga", tag = "gfx-10", features = ["wgsl-in"] }
noise = "0.7"
obj = "0.10"
png = "0.16"
rand = { version = "0.7.2", features = ["wasm-bindgen"] }
winit = { version = "0.24.0", features = ["web-sys"] }
winit = { version = "0.24", features = ["web-sys"] }
[target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
async-executor = "1.0"
@ -76,8 +77,8 @@ path="examples/hello-compute/main.rs"
test = true
[patch."https://github.com/gfx-rs/wgpu"]
#wgpu-types = { version = "0.6", path = "../wgpu/wgpu-types" }
#wgpu-core = { version = "0.6", path = "../wgpu/wgpu-core" }
#wgpu-types = { path = "../wgpu/wgpu-types" }
#wgpu-core = { path = "../wgpu/wgpu-core" }
[patch."https://github.com/gfx-rs/subscriber"]
#wgpu-subscriber = { version = "0.1", path = "../subscriber" }


@ -12,7 +12,8 @@ All framework-based examples render to the window.
## Feature matrix
| Feature | boids | cube | mipmap | msaa-line | shadow | skybox | texture-arrays | water |
| ---------------------- | ------ | ------ | ------ | --------- | ------ | ------ | -------------- | ------ |
| vertex attributes | :star: | :star: | :star: | :star: | :star: | | :star: | :star: |
| WGSL shaders | :star: | :star: | :star: | :star: | :star: | :star: | | |
| vertex attributes | :star: | :star: | :star: | :star: | :star: | :star: | :star: | :star: |
| instancing | :star: | | | | | | | |
| lines and points | | | | :star: | | | | |
| dynamic buffer offsets | | | | | :star: | | | |
@ -26,20 +27,21 @@ All framework-based examples render to the window.
| multisampling | | | | :star: | | | | |
| off-screen rendering | | | | | :star: | | | :star: |
| stencil testing | | | | | | | | |
| depth testing | | | | | :star: | | | :star: |
| depth testing | | | | | :star: | :star: | | :star: |
| depth biasing | | | | | :star: | | | |
| read-only depth | | | | | | | | :star: |
| blending | | :star: | | | | | | :star: |
| render bundles | | | | :star: | | | | :star: |
| compute passes | :star: | | | | | | | |
| optional extensions | | | | | | | :star: | |
| *optional extensions* | | | | | | | :star: | |
| - binding indexing | | | | | | | :star: | |
| - push constants | | | | | | | :star: | |
| - depth clamping | | | | | :star: | | | |
| - compressed textures | | | | | | :star: | | |
| - polygon mode | | :star: | | | | | | |
| - queries | | | :star: | | | | | |
| WGSL shaders | :star: | :star: | :star: | :star: | :star: | :star: | | |
| *integrations* | | | | | | | | |
| - obj loading | | | | | | :star: | | |
## Hacking


@ -1,6 +1,7 @@
# skybox
This animated example demonstrates rendering a skybox.
This animated example demonstrates loading a Wavefront OBJ model and rendering it with a skybox and simple reflections.
It hooks up `winit` mouse controls for camera rotation around the model at the center.
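The model is loaded with the `obj` crate and fan-triangulated into a flat vertex buffer; a minimal sketch of that path, assuming the `obj` 0.10 API used by `main.rs`:

```rust
let source = include_bytes!("models/teslacyberv3.0.obj");
let data = obj::ObjData::load_buf(&source[..]).unwrap();
let mut vertices = Vec::new();
for object in data.objects {
    for group in object.groups {
        for poly in group.polys {
            // Fan-triangulate: triangle (0, i - 1, i) for every vertex after the second.
            for end_index in 2..poly.0.len() {
                for &index in &[0, end_index - 1, end_index] {
                    let obj::IndexTuple(pos_id, _tex_id, normal_id) = poly.0[index];
                    vertices.push((data.position[pos_id], data.normal[normal_id.unwrap()]));
                }
            }
        }
    }
}
```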
## To Run
@ -11,5 +12,3 @@ cargo run --example skybox
## Screenshots
![Skybox](./screenshot.png)
![Skybox 2](./screenshot2.png)


@ -1,45 +1,75 @@
#[path = "../framework.rs"]
mod framework;
use bytemuck::{Pod, Zeroable};
use cgmath::SquareMatrix;
use std::borrow::Cow;
use wgpu::util::DeviceExt;
const IMAGE_SIZE: u32 = 128;
type Uniform = cgmath::Matrix4<f32>;
type Uniforms = [Uniform; 2];
#[derive(Clone, Copy, Pod, Zeroable)]
#[repr(C)]
struct Vertex {
pos: [f32; 3],
normal: [f32; 3],
}
fn raw_uniforms(uniforms: &Uniforms) -> [f32; 16 * 2] {
let mut raw = [0f32; 16 * 2];
raw[..16].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&uniforms[0])[..]);
raw[16..].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&uniforms[1])[..]);
raw
struct Entity {
vertex_count: u32,
vertex_buf: wgpu::Buffer,
}
// Note: we use the Y=up coordinate space in this example.
struct Camera {
screen_size: (u32, u32),
angle_y: f32,
angle_xz: f32,
dist: f32,
}
const MODEL_CENTER_Y: f32 = 2.0;
impl Camera {
fn to_uniform_data(&self) -> [f32; 16 * 3 + 4] {
let aspect = self.screen_size.0 as f32 / self.screen_size.1 as f32;
let mx_projection = cgmath::perspective(cgmath::Deg(45f32), aspect, 1.0, 50.0);
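// Orbit camera: angle_y is the azimuth around the model, angle_xz the elevation, dist the orbit radius.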
let cam_pos = cgmath::Point3::new(
self.angle_xz.cos() * self.angle_y.sin() * self.dist,
self.angle_xz.sin() * self.dist + MODEL_CENTER_Y,
self.angle_xz.cos() * self.angle_y.cos() * self.dist,
);
let mx_view = cgmath::Matrix4::look_at_rh(
cam_pos,
cgmath::Point3::new(0f32, MODEL_CENTER_Y, 0.0),
cgmath::Vector3::unit_y(),
);
let proj = framework::OPENGL_TO_WGPU_MATRIX * mx_projection;
let proj_inv = proj.invert().unwrap();
let view = framework::OPENGL_TO_WGPU_MATRIX * mx_view;
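// Pack proj, proj_inv, and view (16 floats each) plus the padded camera position into one uniform array.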
let mut raw = [0f32; 16 * 3 + 4];
raw[..16].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj)[..]);
raw[16..32].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&proj_inv)[..]);
raw[32..48].copy_from_slice(&AsRef::<[f32; 16]>::as_ref(&view)[..]);
raw[48..51].copy_from_slice(AsRef::<[f32; 3]>::as_ref(&cam_pos));
raw[51] = 1.0;
raw
}
}
pub struct Skybox {
aspect: f32,
pipeline: wgpu::RenderPipeline,
camera: Camera,
sky_pipeline: wgpu::RenderPipeline,
entity_pipeline: wgpu::RenderPipeline,
bind_group: wgpu::BindGroup,
uniform_buf: wgpu::Buffer,
uniforms: Uniforms,
entities: Vec<Entity>,
depth_view: wgpu::TextureView,
staging_belt: wgpu::util::StagingBelt,
}
impl Skybox {
fn generate_uniforms(aspect_ratio: f32) -> Uniforms {
use cgmath::SquareMatrix;
let mx_projection = cgmath::perspective(cgmath::Deg(45f32), aspect_ratio, 1.0, 10.0);
let mx_view = cgmath::Matrix4::look_at_rh(
cgmath::Point3::new(1.5f32, -5.0, 3.0),
cgmath::Point3::new(0f32, 0.0, 0.0),
cgmath::Vector3::unit_z(),
);
let mx_correction = framework::OPENGL_TO_WGPU_MATRIX;
let proj_inv = (mx_correction * mx_projection).invert().unwrap();
[proj_inv, mx_correction * mx_view]
}
}
const DEPTH_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Depth24Plus;
impl framework::Example for Skybox {
fn optional_features() -> wgpu::Features {
@ -54,6 +84,39 @@ impl framework::Example for Skybox {
device: &wgpu::Device,
queue: &wgpu::Queue,
) -> Self {
let mut entities = Vec::new();
{
let source = include_bytes!("models/teslacyberv3.0.obj");
let data = obj::ObjData::load_buf(&source[..]).unwrap();
let mut vertices = Vec::new();
for object in data.objects {
for group in object.groups {
vertices.clear();
for poly in group.polys {
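// Fan-triangulate the polygon: emit triangle (0, end_index - 1, end_index) for every vertex after the second.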
for end_index in 2..poly.0.len() {
for &index in &[0, end_index - 1, end_index] {
let obj::IndexTuple(position_id, _texture_id, normal_id) =
poly.0[index];
vertices.push(Vertex {
pos: data.position[position_id],
normal: data.normal[normal_id.unwrap()],
})
}
}
}
let vertex_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Vertex"),
contents: bytemuck::cast_slice(&vertices),
usage: wgpu::BufferUsage::VERTEX,
});
entities.push(Entity {
vertex_count: vertices.len() as u32,
vertex_buf,
});
}
}
}
let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: None,
entries: &[
@ -103,11 +166,16 @@ impl framework::Example for Skybox {
flags,
});
let aspect = sc_desc.width as f32 / sc_desc.height as f32;
let uniforms = Self::generate_uniforms(aspect);
let camera = Camera {
screen_size: (sc_desc.width, sc_desc.height),
angle_xz: 0.2,
angle_y: 0.2,
dist: 30.0,
};
let raw_uniforms = camera.to_uniform_data();
let uniform_buf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {
label: Some("Buffer"),
contents: bytemuck::cast_slice(&raw_uniforms(&uniforms)),
contents: bytemuck::cast_slice(&raw_uniforms),
usage: wgpu::BufferUsage::UNIFORM | wgpu::BufferUsage::COPY_DST,
});
@ -117,25 +185,63 @@ impl framework::Example for Skybox {
push_constant_ranges: &[],
});
// Create the render pipeline
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: None,
// Create the render pipelines
let sky_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Sky"),
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
entry_point: "vs_sky",
buffers: &[],
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
entry_point: "fs_sky",
targets: &[sc_desc.format.into()],
}),
primitive: wgpu::PrimitiveState {
front_face: wgpu::FrontFace::Cw,
..Default::default()
},
depth_stencil: None,
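// The sky is drawn at the far plane with depth writes disabled, so it only covers pixels the model left empty.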
depth_stencil: Some(wgpu::DepthStencilState {
format: DEPTH_FORMAT,
depth_write_enabled: false,
depth_compare: wgpu::CompareFunction::LessEqual,
stencil: wgpu::StencilState::default(),
bias: wgpu::DepthBiasState::default(),
clamp_depth: false,
}),
multisample: wgpu::MultisampleState::default(),
});
let entity_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("Entity"),
layout: Some(&pipeline_layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_entity",
buffers: &[wgpu::VertexBufferLayout {
array_stride: std::mem::size_of::<Vertex>() as wgpu::BufferAddress,
step_mode: wgpu::InputStepMode::Vertex,
attributes: &wgpu::vertex_attr_array![0 => Float3, 1 => Float3],
}],
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_entity",
targets: &[sc_desc.format.into()],
}),
primitive: wgpu::PrimitiveState {
front_face: wgpu::FrontFace::Cw,
..Default::default()
},
depth_stencil: Some(wgpu::DepthStencilState {
format: DEPTH_FORMAT,
depth_write_enabled: true,
depth_compare: wgpu::CompareFunction::LessEqual,
stencil: wgpu::StencilState::default(),
bias: wgpu::DepthBiasState::default(),
clamp_depth: false,
}),
multisample: wgpu::MultisampleState::default(),
});
@ -228,31 +334,51 @@ impl framework::Example for Skybox {
label: None,
});
let depth_texture = device.create_texture(&wgpu::TextureDescriptor {
size: wgpu::Extent3d {
width: sc_desc.width,
height: sc_desc.height,
depth: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: DEPTH_FORMAT,
usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
label: None,
});
Skybox {
pipeline,
camera,
sky_pipeline,
entity_pipeline,
bind_group,
uniform_buf,
aspect,
uniforms,
entities,
depth_view: depth_texture.create_view(&wgpu::TextureViewDescriptor::default()),
staging_belt: wgpu::util::StagingBelt::new(0x100),
}
}
fn update(&mut self, _event: winit::event::WindowEvent) {
//empty
fn update(&mut self, event: winit::event::WindowEvent) {
match event {
winit::event::WindowEvent::CursorMoved { position, .. } => {
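// Map the cursor position to orbit angles: horizontal motion controls the azimuth, vertical the elevation.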
let norm_x = position.x as f32 / self.camera.screen_size.0 as f32 - 0.5;
let norm_y = position.y as f32 / self.camera.screen_size.1 as f32 - 0.5;
self.camera.angle_y = norm_x * 5.0;
self.camera.angle_xz = norm_y;
}
_ => {}
}
}
fn resize(
&mut self,
sc_desc: &wgpu::SwapChainDescriptor,
_device: &wgpu::Device,
queue: &wgpu::Queue,
_queue: &wgpu::Queue,
) {
self.aspect = sc_desc.width as f32 / sc_desc.height as f32;
let uniforms = Skybox::generate_uniforms(self.aspect);
let mx_total = uniforms[0] * uniforms[1];
let mx_ref: &[f32; 16] = mx_total.as_ref();
queue.write_buffer(&self.uniform_buf, 0, bytemuck::cast_slice(mx_ref));
self.camera.screen_size = (sc_desc.width, sc_desc.height);
}
fn render(
@ -266,9 +392,7 @@ impl framework::Example for Skybox {
device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
// update rotation
let rotation = cgmath::Matrix4::<f32>::from_angle_x(cgmath::Deg(0.25));
self.uniforms[1] = self.uniforms[1] * rotation;
let raw_uniforms = raw_uniforms(&self.uniforms);
let raw_uniforms = self.camera.to_uniform_data();
self.staging_belt
.write_buffer(
&mut encoder,
@ -297,11 +421,25 @@ impl framework::Example for Skybox {
store: true,
},
}],
depth_stencil_attachment: None,
depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor {
attachment: &self.depth_view,
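// Clear depth to the far plane; the buffer is never read back, so it does not need to be stored.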
depth_ops: Some(wgpu::Operations {
load: wgpu::LoadOp::Clear(1.0),
store: false,
}),
stencil_ops: None,
}),
});
rpass.set_pipeline(&self.pipeline);
rpass.set_bind_group(0, &self.bind_group, &[]);
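// Draw the loaded model first, then let the sky fill whatever remains at the far plane.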
rpass.set_pipeline(&self.entity_pipeline);
for entity in self.entities.iter() {
rpass.set_vertex_buffer(0, entity.vertex_buf.slice(..));
rpass.draw(0..entity.vertex_count, 0..1);
}
rpass.set_pipeline(&self.sky_pipeline);
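// One full-screen triangle; the vertex shader derives its positions from the vertex index.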
rpass.draw(0..3 as u32, 0..1);
}


@ -0,0 +1,62 @@
# Blender MTL File: 'teslacyberv3.0.blend'
# Material Count: 6
newmtl Material
Ns 65.476285
Ka 1.000000 1.000000 1.000000
Kd 0.411568 0.411568 0.411568
Ks 0.614679 0.614679 0.614679
Ke 0.000000 0.000000 0.000000
Ni 36.750000
d 1.000000
illum 3
newmtl Материал
Ns 323.999994
Ka 1.000000 1.000000 1.000000
Kd 0.800000 0.800000 0.800000
Ks 0.500000 0.500000 0.500000
Ke 0.000000 0.000000 0.000000
Ni 1.000000
d 1.000000
illum 2
newmtl Материал.001
Ns 900.000000
Ka 1.000000 1.000000 1.000000
Kd 0.026240 0.026240 0.026240
Ks 0.000000 0.000000 0.000000
Ke 0.000000 0.000000 0.000000
Ni 1.450000
d 1.000000
illum 1
newmtl Материал.002
Ns 0.000000
Ka 1.000000 1.000000 1.000000
Kd 0.031837 0.032429 0.029425
Ks 0.169725 0.169725 0.169725
Ke 0.000000 0.000000 0.000000
Ni 0.000000
d 1.000000
illum 2
newmtl Материал.003
Ns 900.000000
Ka 1.000000 1.000000 1.000000
Kd 0.023585 0.083235 0.095923
Ks 1.000000 1.000000 1.000000
Ke 0.000000 0.000000 0.000000
Ni 45.049999
d 1.000000
illum 3
newmtl Материал.004
Ns 323.999994
Ka 1.000000 1.000000 1.000000
Kd 0.800000 0.800000 0.800000
Ks 0.500000 0.500000 0.500000
Ke 0.000000 0.000000 0.000000
Ni 1.000000
d 1.000000
illum 2

File diff suppressed because it is too large.

Binary file not shown (before: 345 KiB, after: 484 KiB).

Binary file not shown (before: 170 KiB).


@ -5,30 +5,49 @@ var<out> out_position: vec4<f32>;
[[block]]
struct Data {
// from camera to screen
proj: mat4x4<f32>;
// from screen to camera
proj_inv: mat4x4<f32>;
// from world to camera
view: mat4x4<f32>;
// camera position
cam_pos: vec4<f32>;
};
[[group(0), binding(0)]]
var r_data: Data;
[[stage(vertex)]]
fn vs_main() {
fn vs_sky() {
// hacky way to draw a large triangle
var tmp1: i32 = i32(in_vertex_index) / 2;
var tmp2: i32 = i32(in_vertex_index) & 1;
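// Indices 0, 1, 2 yield positions (-1,-1), (-1,3), (3,-1): one triangle covering the whole screen.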
const pos: vec4<f32> = vec4<f32>(
f32(tmp1) * 4.0 - 1.0,
f32(tmp2) * 4.0 - 1.0,
0.0,
1.0,
1.0
);
// transposition = inversion for this orthonormal matrix
const inv_model_view: mat3x3<f32> = transpose(mat3x3<f32>(r_data.view.x.xyz, r_data.view.y.xyz, r_data.view.z.xyz));
var unprojected: vec4<f32> = r_data.proj_inv * pos; //TODO: const
out_uv = inv_model_view * unprojected.xyz;
out_position = pos;
}
[[location(0)]] var<in> in_pos: vec3<f32>;
[[location(1)]] var<in> in_normal: vec3<f32>;
[[location(1)]] var<out> out_normal: vec3<f32>;
[[location(3)]] var<out> out_view: vec3<f32>;
[[stage(vertex)]]
fn vs_entity() {
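// Pass the surface normal and the camera-to-vertex view vector to the fragment shader for reflections.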
out_normal = in_normal;
out_view = in_pos - r_data.cam_pos.xyz;
out_position = r_data.proj * r_data.view * vec4<f32>(in_pos, 1.0);
}
[[group(0), binding(1)]]
var r_texture: texture_cube<f32>;
[[group(0), binding(2)]]
@ -36,8 +55,19 @@ var r_sampler: sampler;
[[location(0)]] var<in> in_uv: vec3<f32>;
[[location(0)]] var<out> out_color: vec4<f32>;
[[location(3)]] var<in> in_view: vec3<f32>;
[[stage(fragment)]]
fn fs_main() {
fn fs_sky() {
out_color = textureSample(r_texture, r_sampler, in_uv);
}
[[stage(fragment)]]
fn fs_entity() {
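// Mirror the view direction about the surface normal and sample the sky cubemap for a simple reflection.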
const incident: vec3<f32> = normalize(in_view);
const normal: vec3<f32> = normalize(in_normal);
const reflected: vec3<f32> = incident - 2.0 * dot(normal, incident) * normal;
var reflected_color: vec4<f32> = textureSample(r_texture, r_sampler, reflected);
out_color = vec4<f32>(0.1, 0.1, 0.1, 0.1) + 0.5 * reflected_color;
}