Copy examples from example repo (#1023)

*   Update PR template
*   Rename queue to queue_family
*   Add examples readme
*   Use the same image output filename
*   Move existing examples with multiple files into their own folder
*   Improve error message when running runtime-shader in the wrong directory
This commit is contained in:
Lucas Kent 2018-08-30 11:37:51 +10:00 committed by GitHub
parent 97dbbdc848
commit e5d501fb5e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 1787 additions and 31 deletions

View File

@ -1,2 +1,3 @@
- [ ] Added an entry to `CHANGELOG.md` if knowledge of this change could be valuable to users
- [ ] Updated documentation to reflect any user-facing changes
* [ ] Added an entry to `CHANGELOG.md` if knowledge of this change could be valuable to users
* [ ] Updated documentation to reflect any user-facing changes - in this repository
* [ ] Updated documentation to reflect any user-facing changes - PR to the [guide](https://github.com/vulkano-rs/vulkano-www) that fixes existing documentation invalidated by this PR.

19
examples/readme.md Normal file
View File

@ -0,0 +1,19 @@
# Examples
These examples use unreleased functionality of vulkano.
For a snapshot of the examples at the latest vulkano release take a look at the [examples repository](https://github.com/vulkano-rs/vulkano-examples)
## Running the examples:
```sh
cargo run --bin <example>
```
## Example:
```sh
cargo run --bin triangle
```
If you want to compare performance with other libraries, you should pass the `--release` flag as
well. Rust is pretty slow in debug mode.

View File

@ -45,12 +45,12 @@ fn main() {
//
// The Vulkan specs guarantee that a compliant implementation must provide at least one queue
// that supports compute operations.
let queue = physical.queue_families().find(|&q| q.supports_compute()).unwrap();
let queue_family = physical.queue_families().find(|&q| q.supports_compute()).unwrap();
// Now initializing the device.
let (device, mut queues) = {
Device::new(physical, physical.supported_features(), &DeviceExtensions::none(),
[(queue, 0.5)].iter().cloned()).expect("failed to create device")
[(queue_family, 0.5)].iter().cloned()).expect("failed to create device")
};
// Since we can request multiple queues, the `queues` variable is in fact an iterator. In this

View File

@ -92,8 +92,8 @@ fn main() {
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
let physical = PhysicalDevice::enumerate(&instance).next().expect("no device available");
let queue = physical.queue_families().next().expect("couldn't find a queue family");
let (_, mut queues) = Device::new(physical, physical.supported_features(), &DeviceExtensions::none(), vec![(queue, 0.5)]).expect("failed to create device");
let queue_family = physical.queue_families().next().expect("couldn't find a queue family");
let (_, mut queues) = Device::new(physical, physical.supported_features(), &DeviceExtensions::none(), vec![(queue_family, 0.5)]).expect("failed to create device");
let queue = queues.next().unwrap();
// Create an image in order to generate some additional logging:

View File

@ -0,0 +1,186 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::device::Queue;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::image::ImageViewAccess;
use vulkano::pipeline::blend::AttachmentBlend;
use vulkano::pipeline::blend::BlendFactor;
use vulkano::pipeline::blend::BlendOp;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::GraphicsPipelineAbstract;
use vulkano::pipeline::viewport::Viewport;
use std::sync::Arc;
/// Allows applying an ambient lighting to a scene.
pub struct AmbientLightingSystem {
    // Queue used to record and submit the lighting draw commands.
    gfx_queue: Arc<Queue>,
    // Single full-screen triangle; see the TODO in `new` for why a vertex buffer is needed.
    vertex_buffer: Arc<CpuAccessibleBuffer<[Vertex]>>,
    // Graphics pipeline that performs the additive-blend lighting pass.
    pipeline: Arc<GraphicsPipelineAbstract + Send + Sync>,
}
impl AmbientLightingSystem {
    /// Initializes the ambient lighting system.
    ///
    /// - `gfx_queue` is the queue that will be used to record and submit the draw commands.
    /// - `subpass` is the render pass subpass inside which the lighting will be applied.
    pub fn new<R>(gfx_queue: Arc<Queue>, subpass: Subpass<R>) -> AmbientLightingSystem
        where R: RenderPassAbstract + Send + Sync + 'static
    {
        // TODO: vulkano doesn't allow us to draw without a vertex buffer, otherwise we could
        // hard-code these values in the shader
        //
        // A single triangle large enough to cover the whole viewport.
        let vertex_buffer = {
            CpuAccessibleBuffer::from_iter(gfx_queue.device().clone(), BufferUsage::all(), [
                Vertex { position: [-1.0, -1.0] },
                Vertex { position: [-1.0, 3.0] },
                Vertex { position: [3.0, -1.0] }
            ].iter().cloned()).expect("failed to create buffer")
        };

        let pipeline = {
            let vs = vs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");
            let fs = fs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");

            // Additive blending (`One + One`), so that each lighting pass adds its contribution
            // to whatever is already in the framebuffer instead of replacing it.
            Arc::new(GraphicsPipeline::start()
                .vertex_input_single_buffer::<Vertex>()
                .vertex_shader(vs.main_entry_point(), ())
                .triangle_list()
                .viewports_dynamic_scissors_irrelevant(1)
                .fragment_shader(fs.main_entry_point(), ())
                .blend_collective(AttachmentBlend {
                    enabled: true,
                    color_op: BlendOp::Add,
                    color_source: BlendFactor::One,
                    color_destination: BlendFactor::One,
                    alpha_op: BlendOp::Max,
                    alpha_source: BlendFactor::One,
                    alpha_destination: BlendFactor::One,
                    mask_red: true,
                    mask_green: true,
                    mask_blue: true,
                    mask_alpha: true,
                })
                .render_pass(subpass)
                .build(gfx_queue.device().clone())
                .unwrap()) as Arc<_>
        };

        // Field init shorthand, consistent with how `FrameSystem` is constructed in `system.rs`.
        AmbientLightingSystem {
            gfx_queue,
            vertex_buffer,
            pipeline,
        }
    }

    /// Builds a secondary command buffer that applies ambient lighting.
    ///
    /// This secondary command buffer will read `color_input`, multiply it with `ambient_color`
    /// and write the output to the current framebuffer with additive blending (in other words
    /// the value will be added to the existing value in the framebuffer, and not replace the
    /// existing value).
    ///
    /// - `viewport_dimensions` contains the dimensions of the current framebuffer.
    /// - `color_input` is an image containing the albedo of each object of the scene. It is the
    ///   result of the deferred pass.
    /// - `ambient_color` is the color to apply.
    ///
    pub fn draw<C>(&self, viewport_dimensions: [u32; 2], color_input: C,
                   ambient_color: [f32; 3]) -> AutoCommandBuffer
        where C: ImageViewAccess + Send + Sync + 'static,
    {
        // Alpha is fixed at 1.0; only the RGB components carry the ambient color.
        let push_constants = fs::ty::PushConstants {
            color: [ambient_color[0], ambient_color[1], ambient_color[2], 1.0],
        };

        let descriptor_set = PersistentDescriptorSet::start(self.pipeline.clone(), 0)
            .add_image(color_input)
            .unwrap()
            .build()
            .unwrap();

        // The viewport is dynamic, so it must be supplied at draw time.
        let dynamic_state = DynamicState {
            viewports: Some(vec![Viewport {
                origin: [0.0, 0.0],
                dimensions: [viewport_dimensions[0] as f32,
                             viewport_dimensions[1] as f32],
                depth_range: 0.0 .. 1.0,
            }]),
            .. DynamicState::none()
        };

        AutoCommandBufferBuilder::secondary_graphics(self.gfx_queue.device().clone(),
                                                     self.gfx_queue.family(),
                                                     self.pipeline.clone().subpass())
            .unwrap()
            .draw(self.pipeline.clone(),
                  &dynamic_state,
                  vec![self.vertex_buffer.clone()],
                  descriptor_set,
                  push_constants)
            .unwrap()
            .build()
            .unwrap()
    }
}
// Vertex type for the full-screen triangle: a bare 2D position passed straight
// through to `gl_Position` by the vertex shader.
#[derive(Debug, Clone)]
struct Vertex {
    position: [f32; 2]
}
impl_vertex!(Vertex, position);
// Vertex shader: forwards the incoming 2D position unchanged.
mod vs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "vertex"]
    #[src = "
#version 450
layout(location = 0) in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
"]
    struct Dummy;
}
// Fragment shader: multiplies the albedo read from the input attachment by the
// ambient color passed as a push constant.
mod fs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "fragment"]
    #[src = "
#version 450
// The `color_input` parameter of the `draw` method.
layout(input_attachment_index = 0, set = 0, binding = 0) uniform subpassInput u_diffuse;
layout(push_constant) uniform PushConstants {
// The `ambient_color` parameter of the `draw` method.
vec4 color;
} push_constants;
layout(location = 0) out vec4 f_color;
void main() {
// Load the value at the current pixel.
vec3 in_diffuse = subpassLoad(u_diffuse).rgb;
f_color.rgb = push_constants.color.rgb * in_diffuse;
f_color.a = 1.0;
}
"]
    struct Dummy;
}

View File

@ -0,0 +1,209 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::device::Queue;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::image::ImageViewAccess;
use vulkano::pipeline::blend::AttachmentBlend;
use vulkano::pipeline::blend::BlendFactor;
use vulkano::pipeline::blend::BlendOp;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::GraphicsPipelineAbstract;
use vulkano::pipeline::viewport::Viewport;
use cgmath::Vector3;
use std::sync::Arc;
/// Allows applying a directional light source to a scene.
pub struct DirectionalLightingSystem {
    // Queue used to record and submit the lighting draw commands.
    gfx_queue: Arc<Queue>,
    // Single full-screen triangle; see the TODO in `new` for why a vertex buffer is needed.
    vertex_buffer: Arc<CpuAccessibleBuffer<[Vertex]>>,
    // Graphics pipeline that performs the additive-blend lighting pass.
    pipeline: Arc<GraphicsPipelineAbstract + Send + Sync>,
}
impl DirectionalLightingSystem {
/// Initializes the directional lighting system.
pub fn new<R>(gfx_queue: Arc<Queue>, subpass: Subpass<R>) -> DirectionalLightingSystem
where R: RenderPassAbstract + Send + Sync + 'static
{
// TODO: vulkano doesn't allow us to draw without a vertex buffer, otherwise we could
// hard-code these values in the shader
let vertex_buffer = {
CpuAccessibleBuffer::from_iter(gfx_queue.device().clone(), BufferUsage::all(), [
Vertex { position: [-1.0, -1.0] },
Vertex { position: [-1.0, 3.0] },
Vertex { position: [3.0, -1.0] }
].iter().cloned()).expect("failed to create buffer")
};
let pipeline = {
let vs = vs::Shader::load(gfx_queue.device().clone())
.expect("failed to create shader module");
let fs = fs::Shader::load(gfx_queue.device().clone())
.expect("failed to create shader module");
Arc::new(GraphicsPipeline::start()
.vertex_input_single_buffer::<Vertex>()
.vertex_shader(vs.main_entry_point(), ())
.triangle_list()
.viewports_dynamic_scissors_irrelevant(1)
.fragment_shader(fs.main_entry_point(), ())
.blend_collective(AttachmentBlend {
enabled: true,
color_op: BlendOp::Add,
color_source: BlendFactor::One,
color_destination: BlendFactor::One,
alpha_op: BlendOp::Max,
alpha_source: BlendFactor::One,
alpha_destination: BlendFactor::One,
mask_red: true,
mask_green: true,
mask_blue: true,
mask_alpha: true,
})
.render_pass(subpass)
.build(gfx_queue.device().clone())
.unwrap()) as Arc<_>
};
DirectionalLightingSystem {
gfx_queue: gfx_queue,
vertex_buffer: vertex_buffer,
pipeline: pipeline,
}
}
/// Builds a secondary command buffer that applies directional lighting.
///
/// This secondary command buffer will read `color_input` and `normals_input`, and multiply the
/// color with `color` and the dot product of the `direction` with the normal.
/// It then writes the output to the current framebuffer with additive blending (in other words
/// the value will be added to the existing value in the framebuffer, and not replace the
/// existing value).
///
/// Since `normals_input` contains normals in world coordinates, `direction` should also be in
/// world coordinates.
///
/// - `viewport_dimensions` contains the dimensions of the current framebuffer.
/// - `color_input` is an image containing the albedo of each object of the scene. It is the
/// result of the deferred pass.
/// - `normals_input` is an image containing the normals of each object of the scene. It is the
/// result of the deferred pass.
/// - `direction` is the direction of the light in world coordinates.
/// - `color` is the color to apply.
///
pub fn draw<C, N>(&self, viewport_dimensions: [u32; 2], color_input: C, normals_input: N,
direction: Vector3<f32>, color: [f32; 3]) -> AutoCommandBuffer
where C: ImageViewAccess + Send + Sync + 'static,
N: ImageViewAccess + Send + Sync + 'static,
{
let push_constants = fs::ty::PushConstants {
color: [color[0], color[1], color[2], 1.0],
direction: direction.extend(0.0).into(),
};
let descriptor_set = PersistentDescriptorSet::start(self.pipeline.clone(), 0)
.add_image(color_input)
.unwrap()
.add_image(normals_input)
.unwrap()
.build()
.unwrap();
let dynamic_state = DynamicState {
viewports: Some(vec![Viewport {
origin: [0.0, 0.0],
dimensions: [viewport_dimensions[0] as f32,
viewport_dimensions[1] as f32],
depth_range: 0.0 .. 1.0,
}]),
.. DynamicState::none()
};
AutoCommandBufferBuilder::secondary_graphics(self.gfx_queue.device().clone(),
self.gfx_queue.family(),
self.pipeline.clone().subpass())
.unwrap()
.draw(self.pipeline.clone(),
&dynamic_state,
vec![self.vertex_buffer.clone()],
descriptor_set,
push_constants)
.unwrap()
.build()
.unwrap()
}
}
// Vertex type for the full-screen triangle: a bare 2D position passed straight
// through to `gl_Position` by the vertex shader.
#[derive(Debug, Clone)]
struct Vertex {
    position: [f32; 2]
}
impl_vertex!(Vertex, position);
// Vertex shader: forwards the incoming 2D position unchanged.
mod vs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "vertex"]
    #[src = "
#version 450
layout(location = 0) in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
"]
    struct Dummy;
}
// Fragment shader: scales the albedo by the light color and by the (clamped)
// dot product between the surface normal and the light direction.
// NOTE(review): the GLSL below is a compile-time string literal and is reproduced
// verbatim, including its "lightin" typo, which cannot be fixed without changing
// the program text fed to the shader compiler.
mod fs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "fragment"]
    #[src = "
#version 450
// The `color_input` parameter of the `draw` method.
layout(input_attachment_index = 0, set = 0, binding = 0) uniform subpassInput u_diffuse;
// The `normals_input` parameter of the `draw` method.
layout(input_attachment_index = 1, set = 0, binding = 1) uniform subpassInput u_normals;
layout(push_constant) uniform PushConstants {
// The `color` parameter of the `draw` method.
vec4 color;
// The `direction` parameter of the `draw` method.
vec4 direction;
} push_constants;
layout(location = 0) out vec4 f_color;
void main() {
vec3 in_normal = normalize(subpassLoad(u_normals).rgb);
// If the normal is perpendicular to the direction of the lighting, then `light_percent` will
// be 0. If the normal is parallel to the direction of the lightin, then `light_percent` will
// be 1. Any other angle will yield an intermediate value.
float light_percent = -dot(push_constants.direction.xyz, in_normal);
// `light_percent` must not go below 0.0. There's no such thing as negative lighting.
light_percent = max(light_percent, 0.0);
vec3 in_diffuse = subpassLoad(u_diffuse).rgb;
f_color.rgb = light_percent * push_constants.color.rgb * in_diffuse;
f_color.a = 1.0;
}
"]
    struct Dummy;
}

View File

@ -0,0 +1,24 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
// This module exposes what is needed in order to draw with a deferred rendering system.
//
// The main code is in the `system` module, while the other modules implement the different kinds
// of lighting sources.

// Re-export the public frame-handling API from the `system` module.
pub use self::system::DrawPass;
pub use self::system::FrameSystem;
pub use self::system::Frame;
pub use self::system::LightingPass;
pub use self::system::Pass;

// Lighting systems executed during the second (lighting) subpass.
mod ambient_lighting_system;
mod directional_lighting_system;
mod point_lighting_system;
// Core render-pass and per-frame management.
mod system;

View File

@ -0,0 +1,242 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::descriptor::descriptor_set::PersistentDescriptorSet;
use vulkano::device::Queue;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::image::ImageViewAccess;
use vulkano::pipeline::blend::AttachmentBlend;
use vulkano::pipeline::blend::BlendFactor;
use vulkano::pipeline::blend::BlendOp;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::GraphicsPipelineAbstract;
use vulkano::pipeline::viewport::Viewport;
use cgmath::Matrix4;
use cgmath::Vector3;
use std::sync::Arc;
/// Allows applying a point light source to a scene.
pub struct PointLightingSystem {
    // Queue used to record and submit the lighting draw commands.
    gfx_queue: Arc<Queue>,
    // Single full-screen triangle; see the TODO in `new` for why a vertex buffer is needed.
    vertex_buffer: Arc<CpuAccessibleBuffer<[Vertex]>>,
    // Graphics pipeline that performs the additive-blend lighting pass.
    pipeline: Arc<GraphicsPipelineAbstract + Send + Sync>,
}
impl PointLightingSystem {
    /// Initializes the point lighting system.
    ///
    /// - `gfx_queue` is the queue that will be used to record and submit the draw commands.
    /// - `subpass` is the render pass subpass inside which the lighting will be applied.
    pub fn new<R>(gfx_queue: Arc<Queue>, subpass: Subpass<R>) -> PointLightingSystem
        where R: RenderPassAbstract + Send + Sync + 'static
    {
        // TODO: vulkano doesn't allow us to draw without a vertex buffer, otherwise we could
        // hard-code these values in the shader
        //
        // A single triangle large enough to cover the whole viewport.
        let vertex_buffer = {
            CpuAccessibleBuffer::from_iter(gfx_queue.device().clone(), BufferUsage::all(), [
                Vertex { position: [-1.0, -1.0] },
                Vertex { position: [-1.0, 3.0] },
                Vertex { position: [3.0, -1.0] }
            ].iter().cloned()).expect("failed to create buffer")
        };

        let pipeline = {
            let vs = vs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");
            let fs = fs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");

            // Additive blending (`One + One`), so that each lighting pass adds its contribution
            // to whatever is already in the framebuffer instead of replacing it.
            Arc::new(GraphicsPipeline::start()
                .vertex_input_single_buffer::<Vertex>()
                .vertex_shader(vs.main_entry_point(), ())
                .triangle_list()
                .viewports_dynamic_scissors_irrelevant(1)
                .fragment_shader(fs.main_entry_point(), ())
                .blend_collective(AttachmentBlend {
                    enabled: true,
                    color_op: BlendOp::Add,
                    color_source: BlendFactor::One,
                    color_destination: BlendFactor::One,
                    alpha_op: BlendOp::Max,
                    alpha_source: BlendFactor::One,
                    alpha_destination: BlendFactor::One,
                    mask_red: true,
                    mask_green: true,
                    mask_blue: true,
                    mask_alpha: true,
                })
                .render_pass(subpass)
                .build(gfx_queue.device().clone())
                .unwrap()) as Arc<_>
        };

        // Field init shorthand, consistent with how `FrameSystem` is constructed in `system.rs`.
        PointLightingSystem {
            gfx_queue,
            vertex_buffer,
            pipeline,
        }
    }

    /// Builds a secondary command buffer that applies a point lighting.
    ///
    /// This secondary command buffer will read `depth_input` and rebuild the world position of the
    /// pixel currently being processed (modulo rounding errors). It will then compare this
    /// position with `position`, and process the lighting based on the distance and orientation
    /// (similar to the directional lighting system).
    ///
    /// It then writes the output to the current framebuffer with additive blending (in other words
    /// the value will be added to the existing value in the framebuffer, and not replace the
    /// existing value).
    ///
    /// Note that in a real-world application, you probably want to pass additional parameters
    /// such as some way to indicate the distance at which the lighting decrease. In this example
    /// this value is hardcoded in the shader.
    ///
    /// - `viewport_dimensions` contains the dimensions of the current framebuffer.
    /// - `color_input` is an image containing the albedo of each object of the scene. It is the
    ///   result of the deferred pass.
    /// - `normals_input` is an image containing the normals of each object of the scene. It is the
    ///   result of the deferred pass.
    /// - `depth_input` is an image containing the depth value of each pixel of the scene. It is
    ///   the result of the deferred pass.
    /// - `screen_to_world` is a matrix that turns coordinates from framebuffer space into world
    ///   space. This matrix is used alongside with `depth_input` to determine the world
    ///   coordinates of each pixel being processed.
    /// - `position` is the position of the spot light in world coordinates.
    /// - `color` is the color of the light.
    ///
    pub fn draw<C, N, D>(&self, viewport_dimensions: [u32; 2], color_input: C, normals_input: N,
                         depth_input: D, screen_to_world: Matrix4<f32>, position: Vector3<f32>,
                         color: [f32; 3]) -> AutoCommandBuffer
        where C: ImageViewAccess + Send + Sync + 'static,
              N: ImageViewAccess + Send + Sync + 'static,
              D: ImageViewAccess + Send + Sync + 'static,
    {
        // Position is padded to a vec4 (w = 0.0) to match the push-constant layout.
        let push_constants = fs::ty::PushConstants {
            screen_to_world: screen_to_world.into(),
            color: [color[0], color[1], color[2], 1.0],
            position: position.extend(0.0).into(),
        };

        // Bindings must match the `input_attachment_index`/`binding` order in the shader.
        let descriptor_set = PersistentDescriptorSet::start(self.pipeline.clone(), 0)
            .add_image(color_input)
            .unwrap()
            .add_image(normals_input)
            .unwrap()
            .add_image(depth_input)
            .unwrap()
            .build()
            .unwrap();

        // The viewport is dynamic, so it must be supplied at draw time.
        let dynamic_state = DynamicState {
            viewports: Some(vec![Viewport {
                origin: [0.0, 0.0],
                dimensions: [viewport_dimensions[0] as f32,
                             viewport_dimensions[1] as f32],
                depth_range: 0.0 .. 1.0,
            }]),
            .. DynamicState::none()
        };

        AutoCommandBufferBuilder::secondary_graphics(self.gfx_queue.device().clone(),
                                                     self.gfx_queue.family(),
                                                     self.pipeline.clone().subpass())
            .unwrap()
            .draw(self.pipeline.clone(),
                  &dynamic_state,
                  vec![self.vertex_buffer.clone()],
                  descriptor_set,
                  push_constants)
            .unwrap()
            .build()
            .unwrap()
    }
}
// Vertex type for the full-screen triangle: a bare 2D position, also forwarded
// to the fragment shader as `v_screen_coords` for position reconstruction.
#[derive(Debug, Clone)]
struct Vertex {
    position: [f32; 2]
}
impl_vertex!(Vertex, position);
// Vertex shader: forwards the 2D position both to `gl_Position` and to the
// fragment shader (as screen coordinates used to rebuild world positions).
mod vs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "vertex"]
    #[src = "
#version 450
layout(location = 0) in vec2 position;
layout(location = 0) out vec2 v_screen_coords;
void main() {
v_screen_coords = position;
gl_Position = vec4(position, 0.0, 1.0);
}
"]
    struct Dummy;
}
// Fragment shader: reconstructs the world position of each pixel from the depth
// buffer and `screen_to_world`, then attenuates the light color by orientation
// (dot product with the normal) and by distance (exponential falloff).
mod fs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "fragment"]
    #[src = "
#version 450
// The `color_input` parameter of the `draw` method.
layout(input_attachment_index = 0, set = 0, binding = 0) uniform subpassInput u_diffuse;
// The `normals_input` parameter of the `draw` method.
layout(input_attachment_index = 1, set = 0, binding = 1) uniform subpassInput u_normals;
// The `depth_input` parameter of the `draw` method.
layout(input_attachment_index = 2, set = 0, binding = 2) uniform subpassInput u_depth;
layout(push_constant) uniform PushConstants {
// The `screen_to_world` parameter of the `draw` method.
mat4 screen_to_world;
// The `color` parameter of the `draw` method.
vec4 color;
// The `position` parameter of the `draw` method.
vec4 position;
} push_constants;
layout(location = 0) in vec2 v_screen_coords;
layout(location = 0) out vec4 f_color;
void main() {
float in_depth = subpassLoad(u_depth).x;
// Any depth superior or equal to 1.0 means that the pixel has been untouched by the deferred
// pass. We don't want to deal with them.
if (in_depth >= 1.0) {
discard;
}
// Find the world coordinates of the current pixel.
vec4 world = push_constants.screen_to_world * vec4(v_screen_coords, in_depth, 1.0);
world /= world.w;
vec3 in_normal = normalize(subpassLoad(u_normals).rgb);
vec3 light_direction = normalize(push_constants.position.xyz - world.xyz);
// Calculate the percent of lighting that is received based on the orientation of the normal
// and the direction of the light.
float light_percent = max(-dot(light_direction, in_normal), 0.0);
float light_distance = length(push_constants.position.xyz - world.xyz);
// Further decrease light_percent based on the distance with the light position.
light_percent *= 1.0 / exp(light_distance);
vec3 in_diffuse = subpassLoad(u_diffuse).rgb;
f_color.rgb = push_constants.color.rgb * light_percent * in_diffuse;
f_color.a = 1.0;
}
"]
    struct Dummy;
}

View File

@ -0,0 +1,506 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use std::sync::Arc;
use cgmath::Matrix4;
use cgmath::SquareMatrix;
use cgmath::Vector3;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::CommandBuffer;
use vulkano::device::Queue;
use vulkano::format::Format;
use vulkano::framebuffer::Framebuffer;
use vulkano::framebuffer::FramebufferAbstract;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::image::AttachmentImage;
use vulkano::image::ImageAccess;
use vulkano::image::ImageUsage;
use vulkano::image::ImageViewAccess;
use vulkano::sync::GpuFuture;
use frame::ambient_lighting_system::AmbientLightingSystem;
use frame::directional_lighting_system::DirectionalLightingSystem;
use frame::point_lighting_system::PointLightingSystem;
/// System that contains the necessary facilities for rendering a single frame.
pub struct FrameSystem {
    // Queue to use to render everything.
    gfx_queue: Arc<Queue>,

    // Render pass used for the drawing. See the `new` method for the actual render pass content.
    // We need to keep it in `FrameSystem` because we may want to recreate the intermediate buffers
    // in case of a change in the dimensions.
    render_pass: Arc<RenderPassAbstract + Send + Sync>,

    // Intermediate render target that will contain the albedo of each pixel of the scene.
    diffuse_buffer: Arc<AttachmentImage>,
    // Intermediate render target that will contain the normal vector in world coordinates of each
    // pixel of the scene.
    // The normal vector is the vector perpendicular to the surface of the object at this point.
    normals_buffer: Arc<AttachmentImage>,
    // Intermediate render target that will contain the depth of each pixel of the scene.
    // This is a traditional depth buffer. `0.0` means "near", and `1.0` means "far".
    depth_buffer: Arc<AttachmentImage>,

    // Will allow us to add an ambient lighting to a scene during the second subpass.
    ambient_lighting_system: AmbientLightingSystem,
    // Will allow us to add a directional light to a scene during the second subpass.
    directional_lighting_system: DirectionalLightingSystem,
    // Will allow us to add a spot light source to a scene during the second subpass.
    point_lighting_system: PointLightingSystem,
}
impl FrameSystem {
    /// Initializes the frame system.
    ///
    /// Should be called at initialization, as it can take some time to build.
    ///
    /// - `gfx_queue` is the queue that will be used to perform the main rendering.
    /// - `final_output_format` is the format of the image that will later be passed to the
    ///   `frame()` method. We need to know that in advance. If that format ever changes, we have
    ///   to create a new `FrameSystem`.
    ///
    pub fn new(gfx_queue: Arc<Queue>, final_output_format: Format) -> FrameSystem {
        // Creating the render pass.
        //
        // The render pass has two subpasses. In the first subpass, we draw all the objects of the
        // scene. Note that it is not the `FrameSystem` that is responsible for the drawing,
        // instead it only provides an API that allows the user to do so.
        //
        // The drawing of the objects will write to the `diffuse`, `normals` and `depth`
        // attachments.
        //
        // Then in the second subpass, we read these three attachments as input attachments and
        // draw to `final_color`. Each draw operation performed in this second subpass has its
        // value added to `final_color` and not replaced, thanks to blending.
        //
        // > **Warning**: If the red, green or blue component of the final image goes over `1.0`
        // > then it will be clamped. For example a pixel of `[2.0, 1.0, 1.0]` (which is red) will
        // > be clamped to `[1.0, 1.0, 1.0]` (which is white) instead of being converted to
        // > `[1.0, 0.5, 0.5]` as desired. In a real-life application you want to use an additional
        // > intermediate image with a floating-point format, then perform additional passes to
        // > convert all the colors in the correct range. These techniques are known as HDR and
        // > tone mapping.
        //
        // Input attachments are a special kind of way to read images. You can only read from them
        // from a fragment shader, and you can only read the pixel corresponding to the pixel
        // currently being processed by the fragment shader. If you want to read from attachments
        // but can't deal with these restrictions, then you should create multiple render passes
        // instead.
        let render_pass = Arc::new(
            ordered_passes_renderpass!(gfx_queue.device().clone(),
                attachments: {
                    // The image that will contain the final rendering (in this example the swapchain
                    // image, but it could be another image).
                    final_color: {
                        load: Clear,
                        store: Store,
                        format: final_output_format,
                        samples: 1,
                    },
                    // Will be bound to `self.diffuse_buffer`.
                    diffuse: {
                        load: Clear,
                        store: DontCare,
                        format: Format::A2B10G10R10UnormPack32,
                        samples: 1,
                    },
                    // Will be bound to `self.normals_buffer`.
                    normals: {
                        load: Clear,
                        store: DontCare,
                        format: Format::R16G16B16A16Sfloat,
                        samples: 1,
                    },
                    // Will be bound to `self.depth_buffer`.
                    depth: {
                        load: Clear,
                        store: DontCare,
                        format: Format::D16Unorm,
                        samples: 1,
                    }
                },
                passes: [
                    // Write to the diffuse, normals and depth attachments.
                    {
                        color: [diffuse, normals],
                        depth_stencil: {depth},
                        input: []
                    },
                    // Apply lighting by reading these three attachments and writing to `final_color`.
                    {
                        color: [final_color],
                        depth_stencil: {},
                        input: [diffuse, normals, depth]
                    }
                ]
            ).unwrap(),
        );

        // For now we create three temporary images with a dimension of 1 by 1 pixel.
        // These images will be replaced the first time we call `frame()`.
        // TODO: use shortcut provided in vulkano 0.6
        //
        // The intermediate buffers are only ever accessed inside the render pass, hence the
        // transient + input-attachment usage flags.
        let atch_usage = ImageUsage {
            transient_attachment: true,
            input_attachment: true,
            ..ImageUsage::none()
        };

        // Formats here must match the corresponding attachment declarations in the render pass
        // above.
        let diffuse_buffer = AttachmentImage::with_usage(gfx_queue.device().clone(),
                                                         [1, 1],
                                                         Format::A2B10G10R10UnormPack32,
                                                         atch_usage)
            .unwrap();
        let normals_buffer = AttachmentImage::with_usage(gfx_queue.device().clone(),
                                                         [1, 1],
                                                         Format::R16G16B16A16Sfloat,
                                                         atch_usage)
            .unwrap();
        let depth_buffer = AttachmentImage::with_usage(gfx_queue.device().clone(),
                                                       [1, 1],
                                                       Format::D16Unorm,
                                                       atch_usage)
            .unwrap();

        // Initialize the three lighting systems.
        // Note that we need to pass to them the subpass where they will be executed.
        let lighting_subpass = Subpass::from(render_pass.clone(), 1).unwrap();
        let ambient_lighting_system =
            AmbientLightingSystem::new(gfx_queue.clone(), lighting_subpass.clone());
        let directional_lighting_system =
            DirectionalLightingSystem::new(gfx_queue.clone(), lighting_subpass.clone());
        let point_lighting_system =
            PointLightingSystem::new(gfx_queue.clone(), lighting_subpass);

        FrameSystem {
            gfx_queue,
            render_pass: render_pass as Arc<_>,
            diffuse_buffer,
            normals_buffer,
            depth_buffer,
            ambient_lighting_system,
            directional_lighting_system,
            point_lighting_system,
        }
    }
/// Returns the subpass of the render pass where the rendering should write info to gbuffers.
///
/// Has two outputs: the diffuse color (3 components) and the normals in world coordinates
/// (3 components). Also has a depth attachment.
///
/// This method is necessary in order to initialize the pipelines that will draw the objects
/// of the scene.
#[inline]
pub fn deferred_subpass(&self) -> Subpass<Arc<RenderPassAbstract + Send + Sync>> {
Subpass::from(self.render_pass.clone(), 0).unwrap()
}
/// Starts drawing a new frame.
///
/// - `before_future` is the future after which the main rendering should be executed.
/// - `final_image` is the image we are going to draw to.
/// - `world_to_framebuffer` is the matrix that will be used to convert from 3D coordinates in
///   the world into 2D coordinates on the framebuffer.
///
/// Returns a `Frame` that mutably borrows this `FrameSystem`; drive it by calling
/// `Frame::next_pass()` until it yields `Pass::Finished`.
pub fn frame<F, I>(&mut self, before_future: F, final_image: I,
                   world_to_framebuffer: Matrix4<f32>) -> Frame
    where F: GpuFuture + 'static,
          I: ImageAccess + ImageViewAccess + Clone + Send + Sync + 'static
{
    // First of all we recreate `self.diffuse_buffer`, `self.normals_buffer` and
    // `self.depth_buffer` if their dimensions don't match the dimensions of the final image.
    let img_dims = ImageAccess::dimensions(&final_image).width_height();
    if ImageAccess::dimensions(&self.diffuse_buffer).width_height() != img_dims {
        // TODO: use shortcut provided in vulkano 0.6
        let atch_usage = ImageUsage {
            transient_attachment: true,
            input_attachment: true,
            ..ImageUsage::none()
        };

        // Note that we create "transient" images here. This means that the content of the
        // image is only defined when within a render pass. In other words you can draw to
        // them in a subpass then read them in another subpass, but as soon as you leave the
        // render pass their content becomes undefined.
        self.diffuse_buffer = AttachmentImage::with_usage(self.gfx_queue.device().clone(),
                                                          img_dims,
                                                          Format::A2B10G10R10UnormPack32,
                                                          atch_usage)
            .unwrap();
        self.normals_buffer = AttachmentImage::with_usage(self.gfx_queue.device().clone(),
                                                          img_dims,
                                                          Format::R16G16B16A16Sfloat,
                                                          atch_usage)
            .unwrap();
        self.depth_buffer = AttachmentImage::with_usage(self.gfx_queue.device().clone(),
                                                        img_dims,
                                                        Format::D16Unorm,
                                                        atch_usage)
            .unwrap();
    }

    // Build the framebuffer. The image must be attached in the same order as they were defined
    // with the `ordered_passes_renderpass!` macro.
    let framebuffer = Arc::new(Framebuffer::start(self.render_pass.clone())
        .add(final_image.clone())
        .unwrap()
        .add(self.diffuse_buffer.clone())
        .unwrap()
        .add(self.normals_buffer.clone())
        .unwrap()
        .add(self.depth_buffer.clone())
        .unwrap()
        .build()
        .unwrap());

    // Start the command buffer builder that will be filled throughout the frame handling.
    // The render pass is entered immediately; the four clear values correspond, in order, to
    // the four attachments added to the framebuffer above (final image, diffuse, normals, and
    // 1.0 for the depth attachment).
    let command_buffer =
        Some(AutoCommandBufferBuilder::primary_one_time_submit(self.gfx_queue
                                                                   .device()
                                                                   .clone(),
                                                               self.gfx_queue.family())
                 .unwrap()
                 .begin_render_pass(framebuffer.clone(),
                                    true,
                                    vec![[0.0, 0.0, 0.0, 0.0].into(),
                                         [0.0, 0.0, 0.0, 0.0].into(),
                                         [0.0, 0.0, 0.0, 0.0].into(),
                                         1.0f32.into()])
                 .unwrap());

    // `num_pass` starts at 0, meaning nothing has been drawn yet; see `Frame::next_pass`.
    Frame {
        system: self,
        before_main_cb_future: Some(Box::new(before_future)),
        framebuffer,
        num_pass: 0,
        command_buffer,
        world_to_framebuffer,
    }
}
}
/// Represents the active process of rendering a frame.
///
/// This struct mutably borrows the `FrameSystem`.
pub struct Frame<'a> {
    // The `FrameSystem`.
    system: &'a mut FrameSystem,

    // The active pass we are in. This keeps track of the step we are in.
    // - If `num_pass` is 0, then we haven't started anything yet.
    // - If `num_pass` is 1, then we have finished drawing all the objects of the scene.
    // - If `num_pass` is 2, then we have finished applying lighting.
    // - Otherwise the frame is finished.
    // In a more complex application you can have dozens of passes, in which case you probably
    // don't want to document them all here.
    num_pass: u8,

    // Future to wait upon before the main rendering.
    before_main_cb_future: Option<Box<GpuFuture>>,

    // Framebuffer that was used when starting the render pass.
    framebuffer: Arc<FramebufferAbstract + Send + Sync>,

    // The command buffer builder that will be built during the lifetime of this object.
    // Kept in an `Option` so it can be `take()`n, passed through consuming builder methods,
    // and put back.
    command_buffer: Option<AutoCommandBufferBuilder>,

    // Matrix that was passed to `frame()`.
    world_to_framebuffer: Matrix4<f32>,
}
impl<'a> Frame<'a> {
    /// Returns an enumeration containing the next pass of the rendering.
    pub fn next_pass<'f>(&'f mut self) -> Option<Pass<'f, 'a>> {
        // Read the pass counter, then advance it. The value we read determines which stage
        // of the frame the caller is about to handle.
        let current_pass = self.num_pass;
        self.num_pass += 1;

        match current_pass {
            // Nothing has been drawn yet. `begin_render_pass` was already called (in the
            // `frame()` method), so that is the state we are in. Hand the user an object
            // that lets them draw the objects of the scene.
            0 => Some(Pass::Deferred(DrawPass { frame: self })),

            // The scene geometry has been drawn. Advance to the next subpass, then let the
            // user apply lighting.
            1 => {
                let builder = self.command_buffer.take().unwrap();
                self.command_buffer = Some(builder.next_subpass(true).unwrap());
                Some(Pass::Lighting(LightingPass { frame: self }))
            },

            // Lighting is finished. End the render pass, `build()` the actual command
            // buffer, and chain its execution onto the future we were handed when the frame
            // started. The resulting future is given back to the user.
            2 => {
                let command_buffer = self.command_buffer
                    .take()
                    .unwrap()
                    .end_render_pass()
                    .unwrap()
                    .build()
                    .unwrap();
                let after_main_cb = self.before_main_cb_future
                    .take()
                    .unwrap()
                    .then_execute(self.system.gfx_queue.clone(), command_buffer)
                    .unwrap();
                Some(Pass::Finished(Box::new(after_main_cb)))
            },

            // Any value above 2 means the frame is in the finished state and can't do
            // anything more.
            _ => None,
        }
    }
}
/// Struct provided to the user that allows them to customize or handle the pass.
///
/// Returned by `Frame::next_pass()`; the variant tells the caller which stage of the frame
/// comes next.
pub enum Pass<'f, 's: 'f> {
    /// We are in the pass where we draw objects on the scene. The `DrawPass` allows the user to
    /// draw the objects.
    Deferred(DrawPass<'f, 's>),

    /// We are in the pass where we add lighting to the scene. The `LightingPass` allows the user
    /// to add light sources.
    Lighting(LightingPass<'f, 's>),

    /// The frame has been fully prepared, and here is the future that will perform the drawing
    /// on the image.
    Finished(Box<GpuFuture>),
}
/// Allows the user to draw objects on the scene.
///
/// Obtained through `Pass::Deferred`; mutably borrows the `Frame` so that executed commands
/// are appended to the frame's command buffer builder.
pub struct DrawPass<'f, 's: 'f> {
    frame: &'f mut Frame<'s>,
}
impl<'f, 's: 'f> DrawPass<'f, 's> {
    /// Appends a command that executes a secondary command buffer that performs drawing.
    #[inline]
    pub fn execute<C>(&mut self, command_buffer: C)
        where C: CommandBuffer + Send + Sync + 'static
    {
        // Note that vulkano doesn't perform any safety check for now when executing secondary
        // command buffers, hence why it is unsafe. This operation will be safe in the future
        // however.
        // TODO: ^
        unsafe {
            let builder = self.frame.command_buffer.take().unwrap();
            self.frame.command_buffer = Some(builder.execute_commands(command_buffer).unwrap());
        }
    }

    /// Returns the dimensions in pixels of the viewport.
    #[inline]
    pub fn viewport_dimensions(&self) -> [u32; 2] {
        let dimensions = self.frame.framebuffer.dimensions();
        [dimensions[0], dimensions[1]]
    }

    /// Returns the 4x4 matrix that turns world coordinates into 2D coordinates on the framebuffer.
    #[inline]
    pub fn world_to_framebuffer_matrix(&self) -> Matrix4<f32> {
        self.frame.world_to_framebuffer
    }
}
/// Allows the user to apply lighting on the scene.
///
/// Obtained through `Pass::Lighting`; mutably borrows the `Frame` so that the lighting draw
/// commands are appended to the frame's command buffer builder.
pub struct LightingPass<'f, 's: 'f> {
    frame: &'f mut Frame<'s>,
}
impl<'f, 's: 'f> LightingPass<'f, 's> {
    /// Applies ambient lighting to the scene.
    ///
    /// All the objects will be colored with an intensity of `color`.
    pub fn ambient_light(&mut self, color: [f32; 3]) {
        // Note that vulkano doesn't perform any safety check for now when executing secondary
        // command buffers, hence why it is unsafe. This operation will be safe in the future
        // however.
        // TODO: ^
        unsafe {
            let dims = self.frame.framebuffer.dimensions();
            let command_buffer = self.frame.system.ambient_lighting_system.draw([dims[0], dims[1]], self.frame.system.diffuse_buffer.clone(), color);
            self.frame.command_buffer = Some(self.frame
                .command_buffer
                .take()
                .unwrap()
                .execute_commands(command_buffer)
                .unwrap());
        }
    }

    /// Applies directional lighting to the scene.
    ///
    /// All the objects will be colored with an intensity varying between `[0, 0, 0]` and `color`,
    /// depending on the dot product of their normal and `direction`.
    pub fn directional_light(&mut self, direction: Vector3<f32>, color: [f32; 3]) {
        // Note that vulkano doesn't perform any safety check for now when executing secondary
        // command buffers, hence why it is unsafe. This operation will be safe in the future
        // however.
        // TODO: ^
        unsafe {
            let dims = self.frame.framebuffer.dimensions();
            let command_buffer = self.frame.system.directional_lighting_system.draw([dims[0], dims[1]], self.frame.system.diffuse_buffer.clone(), self.frame.system.normals_buffer.clone(), direction, color);
            self.frame.command_buffer = Some(self.frame
                .command_buffer
                .take()
                .unwrap()
                .execute_commands(command_buffer)
                .unwrap());
        }
    }

    /// Applies point lighting to the scene.
    ///
    /// All the objects will be colored with an intensity varying between `[0, 0, 0]` and `color`,
    /// depending on their distance to `position`. Objects that aren't facing `position` won't
    /// receive any light.
    pub fn point_light(&mut self, position: Vector3<f32>, color: [f32; 3]) {
        // Note that vulkano doesn't perform any safety check for now when executing secondary
        // command buffers, hence why it is unsafe. This operation will be safe in the future
        // however.
        // TODO: ^
        unsafe {
            let dims = self.frame.framebuffer.dimensions();
            // The inverse of `world_to_framebuffer` lets the lighting shader reconstruct
            // world-space positions from the gbuffers.
            let command_buffer = {
                self.frame.system.point_lighting_system.draw([dims[0], dims[1]],
                                                             self.frame.system.diffuse_buffer.clone(),
                                                             self.frame.system.normals_buffer.clone(),
                                                             self.frame.system.depth_buffer.clone(),
                                                             self.frame.world_to_framebuffer.invert().unwrap(),
                                                             position, color)
            };
            self.frame.command_buffer = Some(self.frame
                .command_buffer
                .take()
                .unwrap()
                .execute_commands(command_buffer)
                .unwrap());
        }
    }
}

View File

@ -0,0 +1,183 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
// Welcome to the deferred lighting example!
//
// The idea behind deferred lighting is to render the scene in two steps.
//
// First you draw all the objects of the scene. But instead of calculating the color they will
// have on the screen, you output their characteristics such as their diffuse color and their
// normals, and write this to images.
//
// After all the objects are drawn, you should obtain several images that contain the
// characteristics of each pixel.
//
// Then you apply lighting to the scene. In other words you draw to the final image by taking
// these intermediate images and the various lights of the scene as input.
//
// This technique allows you to apply tons of light sources to a scene, which would be too
// expensive otherwise. It has some drawbacks: transparent objects must be drawn after the
// lighting, and the whole process consumes more memory.
extern crate cgmath;
#[macro_use]
extern crate vulkano;
#[macro_use]
extern crate vulkano_shader_derive;
extern crate winit;
extern crate vulkano_win;
use vulkano_win::VkSurfaceBuild;
use vulkano::device::Device;
use vulkano::instance::Instance;
use vulkano::swapchain;
use vulkano::swapchain::PresentMode;
use vulkano::swapchain::SurfaceTransform;
use vulkano::swapchain::Swapchain;
use vulkano::swapchain::AcquireError;
use vulkano::swapchain::SwapchainCreationError;
use vulkano::sync::now;
use vulkano::sync::GpuFuture;
use cgmath::Matrix4;
use cgmath::SquareMatrix;
use cgmath::Vector3;
use std::mem;
mod frame;
mod triangle_draw_system;
fn main() {
// Basic initialization. See the triangle example if you want more details about this.
let instance = {
let extensions = vulkano_win::required_extensions();
Instance::new(None, &extensions, None).expect("failed to create Vulkan instance")
};
let physical = vulkano::instance::PhysicalDevice::enumerate(&instance)
.next().expect("no device available");
let mut events_loop = winit::EventsLoop::new();
let window = winit::WindowBuilder::new().build_vk_surface(&events_loop, instance.clone()).unwrap();
let dimensions = {
let window = window.window();
let factor = window.get_hidpi_factor();
let (width, height) = window.get_inner_size().unwrap().to_physical(factor).into();
[width, height]
};
let queue_family = physical.queue_families().find(|&q| {
q.supports_graphics() && window.is_supported(q).unwrap_or(false)
}).expect("couldn't find a graphical queue family");
let (device, mut queues) = {
let device_ext = vulkano::device::DeviceExtensions {
khr_swapchain: true,
.. vulkano::device::DeviceExtensions::none()
};
Device::new(physical, physical.supported_features(), &device_ext,
[(queue_family, 0.5)].iter().cloned()).expect("failed to create device")
};
let queue = queues.next().unwrap();
let (mut swapchain, mut images) = {
let caps = window.capabilities(physical)
.expect("failed to get surface capabilities");
let alpha = caps.supported_composite_alpha.iter().next().unwrap();
let format = caps.supported_formats[0].0;
Swapchain::new(device.clone(), window.clone(), caps.min_image_count, format,
dimensions, 1, caps.supported_usage_flags, &queue,
SurfaceTransform::Identity, alpha, PresentMode::Fifo, true,
None).expect("failed to create swapchain")
};
// Here is the basic initialization for the deferred system.
let mut frame_system = frame::FrameSystem::new(queue.clone(), swapchain.format());
let triangle_draw_system = triangle_draw_system::TriangleDrawSystem::new(queue.clone(),
frame_system.deferred_subpass());
let mut recreate_swapchain = false;
let mut previous_frame_end = Box::new(now(device.clone())) as Box<GpuFuture>;
loop {
previous_frame_end.cleanup_finished();
if recreate_swapchain {
let dimensions = {
let (new_width, new_height) = window.window().get_inner_size().unwrap().into();
[new_width, new_height]
};
let (new_swapchain, new_images) = match swapchain.recreate_with_dimension(dimensions) {
Ok(r) => r,
Err(SwapchainCreationError::UnsupportedDimensions) => {
continue;
},
Err(err) => panic!("{:?}", err)
};
mem::replace(&mut swapchain, new_swapchain);
mem::replace(&mut images, new_images);
recreate_swapchain = false;
}
let (image_num, acquire_future) = match swapchain::acquire_next_image(swapchain.clone(),
None) {
Ok(r) => r,
Err(AcquireError::OutOfDate) => {
recreate_swapchain = true;
continue;
},
Err(err) => panic!("{:?}", err)
};
let future = previous_frame_end.join(acquire_future);
let mut frame = frame_system.frame(future, images[image_num].clone(), Matrix4::identity());
let mut after_future = None;
while let Some(pass) = frame.next_pass() {
match pass {
frame::Pass::Deferred(mut draw_pass) => {
let cb = triangle_draw_system.draw(draw_pass.viewport_dimensions());
draw_pass.execute(cb);
},
frame::Pass::Lighting(mut lighting) => {
lighting.ambient_light([0.1, 0.1, 0.1]);
lighting.directional_light(Vector3::new(0.2, -0.1, -0.7), [0.6, 0.6, 0.6]);
lighting.point_light(Vector3::new(0.5, -0.5, -0.1), [1.0, 0.0, 0.0]);
lighting.point_light(Vector3::new(-0.9, 0.2, -0.15), [0.0, 1.0, 0.0]);
lighting.point_light(Vector3::new(0.0, 0.5, -0.05), [0.0, 0.0, 1.0]);
},
frame::Pass::Finished(af) => {
after_future = Some(af);
},
}
}
let after_frame = after_future.unwrap()
.then_swapchain_present(queue.clone(), swapchain.clone(), image_num)
.then_signal_fence_and_flush().unwrap();
previous_frame_end = Box::new(after_frame) as Box<_>;
let mut done = false;
events_loop.poll_events(|ev| {
match ev {
winit::Event::WindowEvent { event: winit::WindowEvent::CloseRequested, .. } => done = true,
winit::Event::WindowEvent { event: winit::WindowEvent::Resized (_), .. } => recreate_swapchain = true,
_ => ()
}
});
if done { return; }
}
}

View File

@ -0,0 +1,129 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::DynamicState;
use vulkano::device::Queue;
use vulkano::framebuffer::RenderPassAbstract;
use vulkano::framebuffer::Subpass;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::GraphicsPipelineAbstract;
use vulkano::pipeline::viewport::Viewport;
use std::sync::Arc;
/// Draws a single hard-coded triangle into the deferred (gbuffer) subpass.
pub struct TriangleDrawSystem {
    // Queue whose device owns the buffer and pipeline below, and whose family is used when
    // recording the secondary command buffer.
    gfx_queue: Arc<Queue>,
    // The three vertices of the triangle.
    vertex_buffer: Arc<CpuAccessibleBuffer<[Vertex]>>,
    // Graphics pipeline built against the deferred subpass.
    pipeline: Arc<GraphicsPipelineAbstract + Send + Sync>,
}
impl TriangleDrawSystem {
    /// Initializes a triangle drawing system.
    pub fn new<R>(gfx_queue: Arc<Queue>, subpass: Subpass<R>) -> TriangleDrawSystem
        where R: RenderPassAbstract + Send + Sync + 'static
    {
        // The single hard-coded triangle drawn by this system.
        let vertices = [
            Vertex { position: [-0.5, -0.25] },
            Vertex { position: [0.0, 0.5] },
            Vertex { position: [0.25, -0.1] },
        ];
        let vertex_buffer = CpuAccessibleBuffer::from_iter(gfx_queue.device().clone(),
                                                           BufferUsage::all(),
                                                           vertices.iter().cloned())
            .expect("failed to create buffer");

        let pipeline = {
            let vs = vs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");
            let fs = fs::Shader::load(gfx_queue.device().clone())
                .expect("failed to create shader module");

            Arc::new(GraphicsPipeline::start()
                .vertex_input_single_buffer::<Vertex>()
                .vertex_shader(vs.main_entry_point(), ())
                .triangle_list()
                .viewports_dynamic_scissors_irrelevant(1)
                .fragment_shader(fs.main_entry_point(), ())
                .depth_stencil_simple_depth()
                .render_pass(subpass)
                .build(gfx_queue.device().clone())
                .unwrap()) as Arc<_>
        };

        TriangleDrawSystem {
            gfx_queue,
            vertex_buffer,
            pipeline,
        }
    }

    /// Builds a secondary command buffer that draws the triangle on the current subpass.
    pub fn draw(&self, viewport_dimensions: [u32; 2]) -> AutoCommandBuffer {
        // The viewport is the only piece of dynamic state; everything else was baked into the
        // pipeline at creation time.
        let viewport = Viewport {
            origin: [0.0, 0.0],
            dimensions: [viewport_dimensions[0] as f32,
                         viewport_dimensions[1] as f32],
            depth_range: 0.0 .. 1.0,
        };
        let dynamic_state = DynamicState {
            viewports: Some(vec![viewport]),
            .. DynamicState::none()
        };

        AutoCommandBufferBuilder::secondary_graphics(self.gfx_queue.device().clone(),
                                                     self.gfx_queue.family(),
                                                     self.pipeline.clone().subpass())
            .unwrap()
            .draw(self.pipeline.clone(),
                  &dynamic_state,
                  vec![self.vertex_buffer.clone()], (), ())
            .unwrap()
            .build()
            .unwrap()
    }
}
// Vertex format used by the triangle: a 2D position only.
#[derive(Debug, Clone)]
struct Vertex {
    position: [f32; 2]
}
impl_vertex!(Vertex, position);
/// Vertex shader, compiled at build time by `vulkano_shader_derive`.
/// Forwards the 2D vertex position unchanged (z = 0, w = 1).
mod vs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "vertex"]
    #[src = "
#version 450
layout(location = 0) in vec2 position;
void main() {
    gl_Position = vec4(position, 0.0, 1.0);
}
"]
    struct Dummy;
}
/// Fragment shader, compiled at build time by `vulkano_shader_derive`.
/// Writes a constant white diffuse color and a constant +Z normal into the two gbuffer
/// outputs of the deferred subpass.
mod fs {
    #[derive(VulkanoShader)]
    #[allow(dead_code)]
    #[ty = "fragment"]
    #[src = "
#version 450
layout(location = 0) out vec4 f_color;
layout(location = 1) out vec3 f_normal;
void main() {
    f_color = vec4(1.0, 1.0, 1.0, 1.0);
    f_normal = vec3(0.0, 0.0, 1.0);
}
"]
    struct Dummy;
}

View File

Before

Width:  |  Height:  |  Size: 3.3 KiB

After

Width:  |  Height:  |  Size: 3.3 KiB

View File

@ -7,9 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
// For the purpose of this example all unused code is allowed.
#![allow(dead_code)]
extern crate cgmath;
extern crate image;
extern crate winit;
@ -41,7 +38,7 @@ fn main() {
let mut dimensions;
let queue = physical.queue_families().find(|&q| q.supports_graphics() &&
let queue_family = physical.queue_families().find(|&q| q.supports_graphics() &&
surface.is_supported(q).unwrap_or(false))
.expect("couldn't find a graphical queue family");
@ -50,7 +47,7 @@ fn main() {
.. vulkano::device::DeviceExtensions::none()
};
let (device, mut queues) = vulkano::device::Device::new(physical, physical.supported_features(),
&device_ext, [(queue, 0.5)].iter().cloned())
&device_ext, [(queue_family, 0.5)].iter().cloned())
.expect("failed to create device");
let queue = queues.next().unwrap();
@ -259,6 +256,7 @@ void main() {
tex_coords = position + vec2(0.5);
}
"]
#[allow(dead_code)]
struct Dummy;
}
@ -277,5 +275,6 @@ void main() {
f_color = texture(tex, tex_coords);
}
"]
#[allow(dead_code)]
struct Dummy;
}

View File

@ -0,0 +1,260 @@
// Copyright (c) 2017 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.
//! Multisampling anti-aliasing example, using a render pass resolve.
//!
//! # Introduction to multisampling
//!
//! When you draw an object on an image, this object occupies a certain set of pixels. Each pixel
//! of the image is either fully covered by the object, or not covered at all. There is no such
//! thing as a pixel that is half-covered by the object that you're drawing. What this means is
//! that you will sometimes see a "staircase effect" at the border of your object, also called
//! aliasing.
//!
//! The root cause of aliasing is that the resolution of the image is not high enough. If you
//! increase the size of the image you're drawing to, this effect will still exist but will be
//! much less visible.
//!
//! In order to decrease aliasing, some games and programs use what we call "Super-Sampling Anti
//! Aliasing" (SSAA). For example instead of drawing to an image of size 1024x1024, you draw to an
//! image of size 4096x4096. Then at the end, you scale down your image to 1024x1024 by merging
//! nearby pixels. Since the intermediate image is 4 times larger than the destination, this would
//! be x4 SSAA.
//!
//! However this technique is very expensive in terms of GPU power. The fragment shader and all
//! its calculations has to run four times more often.
//!
//! So instead of SSAA, a common alternative is MSAA (MultiSampling Anti Aliasing). The base
//! principle is more or less the same: you draw to an image of a larger dimension, and then at
//! the end you scale it down to the final size. The difference is that the fragment shader is
//! only run once per pixel of the final size, and its value is duplicated to fill to all the
//! pixels of the intermediate image that are covered by the object.
//!
//! For example, let's say that you use x4 MSAA, you draw to an intermediate image of size
//! 4096x4096, and your object covers the whole image. With MSAA, the fragment shader will only
//! be run 1,048,576 times (1024 * 1024), compared to 16,777,216 times (4096 * 4096) with 4x SSAA.
//! Then the output of each fragment shader invocation is copied in each of the four pixels of the
//! intermediate image that correspond to each pixel of the final image.
//!
//! Now, let's say that your object doesn't cover the whole image. In this situation, only the
//! pixels of the intermediate image that are covered by the object will receive the output of the
//! fragment shader.
//!
//! Because of the way it works, this technique requires direct support from the hardware,
//! contrary to SSAA which can be done on any machine.
//!
//! # Multisampled images
//!
//! Using MSAA with Vulkan is done by creating a regular image, but with a number of samples per
//! pixel different from 1. For example if you want to use 4x MSAA, you should create an image with
//! 4 samples per pixel. Internally this image will have 4 times as many pixels as its dimensions
//! would normally require, but this is handled transparently for you. Drawing to a multisampled
//! image is exactly the same as drawing to a regular image.
//!
//! However multisampled images have some restrictions, for example you can't show them on the
//! screen (swapchain images are always single-sampled), and you can't copy them into a buffer.
//! Therefore when you have finished drawing, you have to blit your multisampled image to a
//! non-multisampled image. This operation is not a regular blit (blitting a multisampled image is
//! an error), instead it is called *resolving* the image.
//!
extern crate image;
#[macro_use]
extern crate vulkano;
#[macro_use]
extern crate vulkano_shader_derive;
use std::sync::Arc;
use image::ImageBuffer;
use image::Rgba;
use vulkano::buffer::BufferUsage;
use vulkano::buffer::CpuAccessibleBuffer;
use vulkano::command_buffer::AutoCommandBufferBuilder;
use vulkano::command_buffer::CommandBuffer;
use vulkano::command_buffer::DynamicState;
use vulkano::device::Device;
use vulkano::device::DeviceExtensions;
use vulkano::format::Format;
use vulkano::framebuffer::Framebuffer;
use vulkano::framebuffer::Subpass;
use vulkano::image::AttachmentImage;
use vulkano::image::Dimensions;
use vulkano::image::StorageImage;
use vulkano::instance::Features;
use vulkano::instance::Instance;
use vulkano::instance::InstanceExtensions;
use vulkano::instance::PhysicalDevice;
use vulkano::pipeline::GraphicsPipeline;
use vulkano::pipeline::viewport::Viewport;
use vulkano::sync::GpuFuture;
fn main() {
    // The usual Vulkan initialization.
    let instance = Instance::new(None, &InstanceExtensions::none(), None)
        .expect("failed to create instance");
    let physical = PhysicalDevice::enumerate(&instance).next().expect("no device available");
    let queue_family = physical.queue_families()
        .find(|&q| q.supports_graphics())
        .expect("couldn't find a graphical queue family");
    let (device, mut queues) = {
        Device::new(physical, &Features::none(), &DeviceExtensions::none(),
                    [(queue_family, 0.5)].iter().cloned()).expect("failed to create device")
    };
    let queue = queues.next().unwrap();

    // Creating our intermediate multisampled image.
    //
    // As explained in the introduction, we pass the same dimensions and format as for the final
    // image. But we also pass the number of samples-per-pixel, which is 4 here.
    let intermediary = AttachmentImage::transient_multisampled(device.clone(), [1024, 1024],
                                                               4, Format::R8G8B8A8Unorm).unwrap();

    // This is the final image that will receive the anti-aliased triangle.
    let image = StorageImage::new(device.clone(), Dimensions::Dim2d { width: 1024, height: 1024 },
                                  Format::R8G8B8A8Unorm, Some(queue.family())).unwrap();

    // In this example, we are going to perform the *resolve* (ie. turning a multisampled image
    // into a non-multisampled one) as part of the render pass. This is the preferred method of
    // doing so, as it has the advantage that the Vulkan implementation doesn't have to write the
    // content of the multisampled image back to memory at the end.
    let render_pass = Arc::new(single_pass_renderpass!(device.clone(),
        attachments: {
            // The first framebuffer attachment is the intermediary image.
            intermediary: {
                load: Clear,
                store: DontCare,
                format: Format::R8G8B8A8Unorm,
                samples: 4,     // This has to match the image definition.
            },
            // The second framebuffer attachment is the final image.
            color: {
                load: DontCare,
                store: Store,
                format: Format::R8G8B8A8Unorm,
                samples: 1,     // Same here, this has to match.
            }
        },
        pass: {
            // When drawing, we have only one output which is the intermediary image.
            color: [intermediary],
            depth_stencil: {},
            // The `resolve` array here must contain either zero entry (if you don't use
            // multisampling), or one entry per color attachment. At the end of the pass, each
            // color attachment will be *resolved* into the given image. In other words, here, at
            // the end of the pass, the `intermediary` attachment will be copied to the attachment
            // named `color`.
            resolve: [color],
        }
    ).unwrap());

    // Creating the framebuffer, the calls to `add` match the list of attachments in order.
    let framebuffer = Arc::new(Framebuffer::start(render_pass.clone())
        .add(intermediary.clone()).unwrap()
        .add(image.clone()).unwrap()
        .build().unwrap());

    // Here is the "end" of the multisampling example, as starting from here everything is the same
    // as in any other example.
    // The pipeline, vertex buffer, and command buffer are created in exactly the same way as
    // without multisampling.
    // At the end of the example, we copy the content of `image` (ie. the final image) to a buffer,
    // then read the content of that buffer and save it to a PNG file.

    // Vertex shader: passes the 2D position straight through.
    mod vs {
        #[derive(VulkanoShader)]
        #[ty = "vertex"]
        #[src = "
#version 450
layout(location = 0) in vec2 position;
void main() {
    gl_Position = vec4(position, 0.0, 1.0);
}
"]
        #[allow(dead_code)]
        struct Dummy;
    }

    // Fragment shader: fills the triangle with a constant red color.
    mod fs {
        #[derive(VulkanoShader)]
        #[ty = "fragment"]
        #[src = "
#version 450
layout(location = 0) out vec4 f_color;
void main() {
    f_color = vec4(1.0, 0.0, 0.0, 1.0);
}
"]
        #[allow(dead_code)]
        struct Dummy;
    }

    let vs = vs::Shader::load(device.clone()).expect("failed to create shader module");
    let fs = fs::Shader::load(device.clone()).expect("failed to create shader module");

    // Vertex format: a 2D position only.
    #[derive(Copy, Clone)]
    struct Vertex {
        position: [f32; 2],
    }
    impl_vertex!(Vertex, position);

    let vertex1 = Vertex { position: [-0.5, -0.5] };
    let vertex2 = Vertex { position: [ 0.0, 0.5] };
    let vertex3 = Vertex { position: [ 0.5, -0.25] };
    let vertex_buffer = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(),
                                                       vec![vertex1, vertex2, vertex3].into_iter()).unwrap();

    let pipeline = Arc::new(GraphicsPipeline::start()
        .vertex_input_single_buffer::<Vertex>()
        .vertex_shader(vs.main_entry_point(), ())
        .viewports_dynamic_scissors_irrelevant(1)
        .fragment_shader(fs.main_entry_point(), ())
        .render_pass(Subpass::from(render_pass.clone(), 0).unwrap())
        .build(device.clone())
        .unwrap());

    let dynamic_state = DynamicState {
        viewports: Some(vec![Viewport {
            origin: [0.0, 0.0],
            dimensions: [1024.0, 1024.0],
            depth_range: 0.0 .. 1.0,
        }]),
        .. DynamicState::none()
    };

    // Host-visible buffer that will receive the final image content so it can be saved to disk.
    let buf = CpuAccessibleBuffer::from_iter(device.clone(), BufferUsage::all(),
                                             (0 .. 1024 * 1024 * 4).map(|_| 0u8))
        .expect("failed to create buffer");

    // The two clear values match the two attachments: blue for the intermediary image, and
    // `ClearValue::None` for the final image (its load op is `DontCare` — the resolve
    // overwrites it entirely).
    let command_buffer = AutoCommandBufferBuilder::primary_one_time_submit(device.clone(), queue.family()).unwrap()
        .begin_render_pass(framebuffer.clone(), false, vec![[0.0, 0.0, 1.0, 1.0].into(), vulkano::format::ClearValue::None])
        .unwrap()
        .draw(pipeline.clone(), &dynamic_state, vertex_buffer.clone(), (), ())
        .unwrap()
        .end_render_pass()
        .unwrap()
        .copy_image_to_buffer(image.clone(), buf.clone())
        .unwrap()
        .build()
        .unwrap();

    // Submit the work and block until the GPU has finished.
    let finished = command_buffer.execute(queue.clone()).unwrap();
    finished.then_signal_fence_and_flush().unwrap()
        .wait(None).unwrap();

    // Read back the resolved image and save it as a PNG.
    let buffer_content = buf.read().unwrap();
    let image = ImageBuffer::<Rgba<u8>, _>::from_raw(1024, 1024, &buffer_content[..]).unwrap();
    image.save("triangle.png").unwrap();
}

View File

@ -12,11 +12,11 @@
//
// Note that you will need to do all correctness checking by yourself.
//
// runtime-shader.vert.glsl and runtime-shader.frag.glsl must be built by yourself.
// vert.glsl and frag.glsl must be built by yourself.
// One way of building them is to build Khronos' glslang and use
// glslangValidator tool:
// $ glslangValidator runtime-shader.vert.glsl -V -S vert -o runtime-shader.vert.spv
// $ glslangValidator runtime-shader.frag.glsl -V -S frag -o runtime-shader.frag.spv
// $ glslangValidator vert.glsl -V -S vert -o vert.spv
// $ glslangValidator frag.glsl -V -S frag -o frag.spv
// Vulkano uses glslangValidator to build your shaders internally.
#[macro_use]
extern crate vulkano;
@ -140,8 +140,8 @@ fn main() {
);
let vs = {
let mut f = File::open("src/bin/runtime-shader.vert.spv")
.expect("Can't find file src/bin/runtime-shader.vert.spv");
let mut f = File::open("src/bin/runtime-shader/vert.spv")
.expect("Can't find file src/bin/runtime-shader/vert.spv This example needs to be run from the root of the example crate.");
let mut v = vec![];
f.read_to_end(&mut v).unwrap();
// Create a ShaderModule on a device the same Shader::load does it.
@ -150,8 +150,8 @@ fn main() {
};
let fs = {
let mut f = File::open("src/bin/runtime-shader.frag.spv")
.expect("Can't find file src/bin/runtime-shader.frag.spv");
let mut f = File::open("src/bin/runtime-shader/frag.spv")
.expect("Can't find file src/bin/runtime-shader/frag.spv");
let mut v = vec![];
f.read_to_end(&mut v).unwrap();
unsafe { ShaderModule::new(graphics_device.clone(), &v) }.unwrap()

View File

@ -7,9 +7,6 @@
// notice may not be copied, modified, or distributed except
// according to those terms.
// For the purpose of this example all unused code is allowed.
#![allow(dead_code)]
extern crate examples;
extern crate cgmath;
extern crate winit;
@ -42,7 +39,7 @@ fn main() {
let mut dimensions;
let queue = physical.queue_families().find(|&q| q.supports_graphics() &&
let queue_family = physical.queue_families().find(|&q| q.supports_graphics() &&
surface.is_supported(q).unwrap_or(false))
.expect("couldn't find a graphical queue family");
@ -52,7 +49,7 @@ fn main() {
};
let (device, mut queues) = vulkano::device::Device::new(physical, physical.supported_features(),
&device_ext, [(queue, 0.5)].iter().cloned())
&device_ext, [(queue_family, 0.5)].iter().cloned())
.expect("failed to create device");
let queue = queues.next().unwrap();
@ -289,6 +286,7 @@ void main() {
gl_Position = uniforms.proj * worldview * vec4(position, 1.0);
}
"]
#[allow(dead_code)]
struct Dummy;
}
@ -311,5 +309,6 @@ void main() {
f_color = vec4(mix(dark_color, regular_color, brightness), 1.0);
}
"]
#[allow(dead_code)]
struct Dummy;
}

View File

@ -17,9 +17,6 @@
// and that you want to learn Vulkan. This means that for example it won't go into details about
// what a vertex or a shader is.
// For the purpose of this example all unused code is allowed.
#![allow(dead_code)]
// The `vulkano` crate is the main crate that you must use to use Vulkan.
#[macro_use]
extern crate vulkano;
@ -104,7 +101,7 @@ fn main() {
// window and a cross-platform Vulkan surface that represents the surface of the window.
let mut events_loop = winit::EventsLoop::new();
let surface = winit::WindowBuilder::new().build_vk_surface(&events_loop, instance.clone()).unwrap();
// The next step is to choose which GPU queue will execute our draw commands.
//
// Devices can provide multiple queues to run commands in parallel (for example a draw queue
@ -115,7 +112,7 @@ fn main() {
// queue to handle data transfers in parallel. In this example we only use one queue.
//
// We have to choose which queues to use early on, because we will need this info very soon.
let queue = physical.queue_families().find(|&q| {
let queue_family = physical.queue_families().find(|&q| {
// We take the first queue that supports drawing to our window.
q.supports_graphics() && surface.is_supported(q).unwrap_or(false)
}).expect("couldn't find a graphical queue family");
@ -146,7 +143,7 @@ fn main() {
};
Device::new(physical, physical.supported_features(), &device_ext,
[(queue, 0.5)].iter().cloned()).expect("failed to create device")
[(queue_family, 0.5)].iter().cloned()).expect("failed to create device")
};
// Since we can request multiple queues, the `queues` variable is in fact an iterator. In this
@ -166,7 +163,7 @@ fn main() {
// pass values that are allowed by the capabilities.
let caps = surface.capabilities(physical)
.expect("failed to get surface capabilities");
dimensions = caps.current_extent.unwrap_or([1024, 768]);
// We choose the dimensions of the swapchain to match the current extent of the surface.
@ -221,6 +218,7 @@ void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
"]
#[allow(dead_code)]
struct Dummy;
}
@ -236,6 +234,7 @@ void main() {
f_color = vec4(1.0, 0.0, 0.0, 1.0);
}
"]
#[allow(dead_code)]
struct Dummy;
}
@ -352,7 +351,7 @@ void main() {
dimensions = surface.capabilities(physical)
.expect("failed to get surface capabilities")
.current_extent.unwrap();
let (new_swapchain, new_images) = match swapchain.recreate_with_dimension(dimensions) {
Ok(r) => r,
// This error tends to happen when the user is manually resizing the window.