Compare commits

..

7 Commits

Author SHA1 Message Date
453a67a93f Add recording program 2024-12-30 23:40:26 +03:00
b5e4b35c6e Enable optimization for debug deps 2024-12-30 23:40:05 +03:00
1f97134511 Use WebP 2024-12-30 23:39:54 +03:00
7ec4a1480e Move most code to the lib 2024-12-30 21:36:18 +03:00
36e0ea9432 Move modules to lib crate 2024-12-30 20:10:49 +03:00
d17a3e3613 Move render pass creation into modules
That’s where the pipeline is created, and they need to match
2024-12-30 20:01:56 +03:00
fe94dd68a2 Make lights MUCH brighter 2024-12-30 19:45:05 +03:00
12 changed files with 528 additions and 194 deletions

63
Cargo.lock generated
View File

@ -100,6 +100,18 @@ dependencies = [
"libloading", "libloading",
] ]
[[package]]
name = "async-channel"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
dependencies = [
"concurrent-queue",
"event-listener-strategy",
"futures-core",
"pin-project-lite",
]
[[package]] [[package]]
name = "atomic-waker" name = "atomic-waker"
version = "1.1.2" version = "1.1.2"
@ -401,6 +413,27 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "event-listener"
version = "5.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba"
dependencies = [
"concurrent-queue",
"parking",
"pin-project-lite",
]
[[package]]
name = "event-listener-strategy"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
dependencies = [
"event-listener",
"pin-project-lite",
]
[[package]] [[package]]
name = "fdeflate" name = "fdeflate"
version = "0.3.7" version = "0.3.7"
@ -453,6 +486,12 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
[[package]]
name = "futures-core"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]] [[package]]
name = "gethostname" name = "gethostname"
version = "0.4.3" version = "0.4.3"
@ -595,12 +634,23 @@ checksum = "cd6f44aed642f18953a158afeb30206f4d50da59fbc66ecb53c66488de73563b"
dependencies = [ dependencies = [
"bytemuck", "bytemuck",
"byteorder-lite", "byteorder-lite",
"image-webp",
"num-traits", "num-traits",
"png", "png",
"zune-core", "zune-core",
"zune-jpeg", "zune-jpeg",
] ]
[[package]]
name = "image-webp"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e031e8e3d94711a9ccb5d6ea357439ef3dcbed361798bd4071dc4d9793fbe22f"
dependencies = [
"byteorder-lite",
"quick-error",
]
[[package]] [[package]]
name = "indexmap" name = "indexmap"
version = "2.7.0" version = "2.7.0"
@ -1106,6 +1156,12 @@ dependencies = [
"ttf-parser", "ttf-parser",
] ]
[[package]]
name = "parking"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.12.3" version = "0.12.3"
@ -1246,6 +1302,12 @@ version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afbdc74edc00b6f6a218ca6a5364d6226a259d4b8ea1af4a0ea063f27e179f4d" checksum = "afbdc74edc00b6f6a218ca6a5364d6226a259d4b8ea1af4a0ea063f27e179f4d"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]] [[package]]
name = "quick-xml" name = "quick-xml"
version = "0.36.2" version = "0.36.2"
@ -1329,6 +1391,7 @@ checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
name = "raytracing3" name = "raytracing3"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"async-channel",
"bytemuck", "bytemuck",
"glam", "glam",
"image", "image",

View File

@ -4,10 +4,20 @@ version = "0.1.0"
edition = "2021" edition = "2021"
default-run = "minitracer" default-run = "minitracer"
[profile.dev]
panic = 'abort'
[profile.dev.package."*"]
opt-level = 3
[profile.test.package."*"]
opt-level = 3
[dependencies] [dependencies]
async-channel = "2.3.1"
bytemuck = { version = "1.21.0", features = ["derive"] } bytemuck = { version = "1.21.0", features = ["derive"] }
glam = { version = "0.29.2", features = ["bytemuck"] } glam = { version = "0.29.2", features = ["bytemuck"] }
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg"] } image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "webp"] }
pollster = "0.4.0" pollster = "0.4.0"
rand = "0.8.5" rand = "0.8.5"
rand_distr = { version = "0.4.3", features = ["std_math"] } rand_distr = { version = "0.4.3", features = ["std_math"] }

View File

@ -1,6 +1,7 @@
use glam::{vec3, Vec3}; use glam::{vec3, Vec3};
use rand_distr::{Bernoulli, Distribution, Uniform}; use rand_distr::{Bernoulli, Distribution, Uniform};
#[derive(Debug, Clone, Copy)]
pub struct SphereParams { pub struct SphereParams {
pub radius: f32, pub radius: f32,
pub alpha: f32, pub alpha: f32,
@ -22,7 +23,7 @@ pub fn distr<R: rand::Rng>() -> impl Fn(&mut R) -> SphereParams {
move |rgen| SphereParams { move |rgen| SphereParams {
origin: pos.sample3(rgen), origin: pos.sample3(rgen),
radius: 2.0f32.powf(rad.sample(rgen)), radius: 2.0f32.powf(rad.sample(rgen)),
alpha: if emit.sample(rgen) { 10.0 } else { 0.0 }, alpha: if emit.sample(rgen) { 100.0 } else { 0.0 },
glossiness: gloss.sample(rgen), glossiness: gloss.sample(rgen),
amplitudes: ampl.sample3(rgen), amplitudes: ampl.sample3(rgen),
frequencies: freq.sample3(rgen), frequencies: freq.sample3(rgen),

View File

@ -121,7 +121,7 @@ fn main() {
let img = let img =
image::RgbaImage::from_raw(extent, extent, buf.slice(..).get_mapped_range().to_vec()).unwrap(); image::RgbaImage::from_raw(extent, extent, buf.slice(..).get_mapped_range().to_vec()).unwrap();
let img: image::RgbImage = img.convert(); let img: image::RgbImage = img.convert();
img.save(format!("textures/env{face}.jpeg")).unwrap(); img.save(format!("textures/env{face}.webp")).unwrap();
}); });
} }
}) })

View File

@ -1,45 +1,14 @@
use std::error::Error; use std::error::Error;
use glam::{mat3, uvec2, vec3, Vec3}; use glam::uvec2;
use image::ImageReader; use raytracing3::present::{self, Presenter};
use present::Presenter; use raytracing3::scene::{load_envmap, Renderer, SceneParams};
use trace::{Tracer, TracerData, TracerEnv};
use winit::{ use winit::{
event::{Event, WindowEvent}, event::{Event, WindowEvent},
event_loop::EventLoop, event_loop::EventLoop,
window::{Window, WindowAttributes}, window::{Window, WindowAttributes},
}; };
mod anim;
mod present;
mod trace;
pub use trace::Sphere;
struct CamLoc {
eye: Vec3,
forward: Vec3,
right: Vec3,
}
fn make_viewport(w: u32, h: u32) -> trace::Viewport {
let size = uvec2(w, h).as_vec2();
let size = size.normalize();
trace::Viewport {
corner: vec3(size.x, size.y, 1.),
}
}
fn convert_location(cam: CamLoc) -> trace::CameraLocation {
let fwd = cam.forward.normalize();
let up = cam.right.cross(fwd).normalize();
let right = up.cross(fwd).normalize();
trace::CameraLocation {
eye: cam.eye,
view: mat3(right, up, fwd),
}
}
const N_SPHERES: u32 = 100; const N_SPHERES: u32 = 100;
const RAYS_PER_PIXEL: u32 = 4; const RAYS_PER_PIXEL: u32 = 4;
@ -57,27 +26,8 @@ fn main() {
let output_format = wgpu::TextureFormat::Bgra8UnormSrgb; let output_format = wgpu::TextureFormat::Bgra8UnormSrgb;
let hdr_format = wgpu::TextureFormat::Rgba16Float; let hdr_format = wgpu::TextureFormat::Rgba16Float;
let tracer = Tracer::new(&device, hdr_format); let scene = SceneParams::new(N_SPHERES);
let mut rng = rand_pcg::Pcg32::new(42, 0); let renderer = Renderer::new(&device, envmap);
let sphere_params: Vec<_> = {
let distr = anim::distr();
(0..N_SPHERES).map(|_| distr(&mut rng)).collect()
};
let camera_params = {
let mut p = anim::distr()(&mut rng);
p.amplitudes *= 2.0;
p.frequencies *= 0.1;
p
};
let target_params = {
let mut p = anim::distr()(&mut rng);
p.origin = Vec3::splat(0.0);
p.amplitudes *= 0.5;
p.frequencies *= 0.1;
p
};
let tracer_env = TracerEnv::new(&device, &tracer, &envmap);
let presenter = Presenter::new(&device, output_format); let presenter = Presenter::new(&device, output_format);
let mut frame = 0; let mut frame = 0;
@ -126,76 +76,29 @@ fn main() {
let hdr = hdr.create_view(&wgpu::TextureViewDescriptor::default()); let hdr = hdr.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None }); let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
{ {
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { let mut render_pass = renderer.prepare(&mut encoder, &hdr);
label: None,
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: &hdr,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT),
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
for _ in 0..RAYS_PER_PIXEL { for _ in 0..RAYS_PER_PIXEL {
frame += 1; frame += 1;
let time = frame as f32 / (60. * RAYS_PER_PIXEL as f32); let time = frame as f32 / (60. * RAYS_PER_PIXEL as f32);
let target = sphere_params[0].to_sphere(time - 0.2).center; renderer.render_frame(
let eye = camera_params.to_sphere(time).center; &device,
let right = camera_params.deriv(time);
let forward = target - eye;
let viewport = make_viewport(size.width, size.height);
let location = convert_location(CamLoc { eye, forward, right });
let spheres: Vec<_> = sphere_params.iter().map(|p| p.to_sphere(time)).collect();
let data = TracerData::new(&device, &tracer, &spheres);
tracer.render(
&mut render_pass, &mut render_pass,
&data, uvec2(size.width, size.height),
&tracer_env, &scene,
trace::Params { time,
max_reflections: 3, frame,
min_strength: 0.1,
sphere_count: N_SPHERES,
seed: frame,
},
viewport,
trace::Aperture {
radius: 0.001,
focal_distance: forward.length(),
glare_strength: 0.1,
glare_radius: 0.1,
},
location,
); );
} }
} }
{ presenter.render(
let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { &device,
label: None, &mut encoder,
color_attachments: &[Some(wgpu::RenderPassColorAttachment { &hdr,
view: &view, &view,
resolve_target: None, present::Params {
ops: wgpu::Operations { divisor: RAYS_PER_PIXEL as f32,
load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT), },
store: wgpu::StoreOp::Store, );
},
})],
depth_stencil_attachment: None,
occlusion_query_set: None,
timestamp_writes: None,
});
presenter.render(
&device,
&mut render_pass,
&hdr,
present::Params {
divisor: RAYS_PER_PIXEL as f32,
},
);
}
queue.submit(std::iter::once(encoder.finish())); queue.submit(std::iter::once(encoder.finish()));
output.present(); output.present();
} }
@ -237,75 +140,3 @@ async fn init_gpu(wnd: &Window) -> Result<(wgpu::Device, wgpu::Queue, wgpu::Surf
.unwrap(); .unwrap();
Ok((device, queue, surface)) Ok((device, queue, surface))
} }
fn load_envmap(device: &wgpu::Device, queue: &wgpu::Queue) -> wgpu::TextureView {
let imgs = std::thread::scope(|s| {
[0, 1, 2, 3, 4, 5]
.map(|face| {
s.spawn(move || {
let img = ImageReader::open(format!("textures/env{face}.jpeg"))
.unwrap()
.with_guessed_format()
.unwrap()
.decode()
.unwrap();
img.to_rgba8()
})
})
.map(|t| t.join().unwrap())
});
let size = imgs[0].width();
for img in &imgs {
assert!(img.width() == size);
assert!(img.height() == size);
}
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: None,
size: wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 6,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
for (face, img) in imgs.iter().enumerate() {
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d {
x: 0,
y: 0,
z: face as u32,
},
aspect: wgpu::TextureAspect::All,
},
img.as_raw(),
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(4 * size),
rows_per_image: Some(size),
},
wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 1,
},
);
}
texture.create_view(&wgpu::TextureViewDescriptor {
label: None,
format: None,
dimension: Some(wgpu::TextureViewDimension::Cube),
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
})
}

172
src/bin/rec.rs Normal file
View File

@ -0,0 +1,172 @@
use std::env::args;
use std::error::Error;
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use glam::{uvec2, UVec2};
use image::buffer::ConvertBuffer;
use raytracing3::present::{self, Presenter};
use raytracing3::scene::{load_envmap, Renderer, SceneParams};
const SIZE: UVec2 = uvec2(1920, 1080);
const FRAME_RATE: u32 = 60;
const DURATION_SECONDS: u32 = 120;
const N_FRAMES: u32 = DURATION_SECONDS * FRAME_RATE;
const N_SPHERES: u32 = 100;
const RAYS_PER_PIXEL: u32 = 1024;
/// Entry point: renders the animation to a directory of WebP frames.
///
/// Usage: `rec <output-dir>`. The directory must not already exist (creation
/// fails otherwise). GPU rendering happens in `do_work`; a pool of 16 worker
/// threads drains the channel, converting mapped readback buffers to RGB and
/// saving them as `frameNNNNNN.webp`.
fn main() {
    let argv: Vec<_> = args().collect();
    let [_, out_dir] = argv.as_slice() else {
        panic!("invalid arguments");
    };
    let out_dir: PathBuf = out_dir.into();
    fs::create_dir(&out_dir).expect("failed to create the output directory");
    std::thread::scope(|s| {
        // Bounded channel: the GPU side blocks instead of queueing unbounded readbacks.
        let (tx, rx) = async_channel::bounded::<(u32, Arc<wgpu::Buffer>)>(50);
        // Image-encoding worker pool.
        for _ in 0..16 {
            let rx = rx.clone();
            let dir = &out_dir;
            s.spawn(move || {
                while let Ok((frame, buffer)) = rx.recv_blocking() {
                    let raw = buffer.slice(..).get_mapped_range().to_vec();
                    let rgba =
                        image::RgbaImage::from_raw(SIZE.x, SIZE.y, raw).expect("read failure!");
                    let rgb: image::RgbImage = rgba.convert();
                    rgb.save(dir.join(format!("frame{frame:06}.webp")))
                        .expect("save failure!");
                }
            });
        }
        do_work(tx);
    });
}
/// Renders `N_FRAMES` frames on the GPU; each finished frame is copied into a
/// mappable buffer and, once the map completes, sent to `img_sender` as a
/// `(frame_index, buffer)` pair for the encoder threads to consume.
fn do_work(img_sender: async_channel::Sender<(u32, Arc<wgpu::Buffer>)>) {
    // Wrapped in Arc so each per-frame map_async callback can hold a handle.
    let img_sender = Arc::new(img_sender);
    let (device, queue) = pollster::block_on(init_gpu()).unwrap();
    let envmap = load_envmap(&device, &queue);
    // Flush the texture uploads queued by load_envmap's write_texture calls.
    queue.submit([]);
    let texsize = wgpu::Extent3d {
        width: SIZE.x,
        height: SIZE.y,
        depth_or_array_layers: 1,
    };
    let output_format = wgpu::TextureFormat::Rgba8UnormSrgb;
    let hdr_format = wgpu::TextureFormat::Rgba16Float;
    let scene = SceneParams::new(N_SPHERES);
    let renderer = Renderer::new(&device, envmap);
    let presenter = Presenter::new(&device, output_format);
    for frame in 0..N_FRAMES {
        // Final 8-bit frame: render target for the presenter and source for
        // the texture-to-buffer copy below.
        let output = device.create_texture(&wgpu::TextureDescriptor {
            label: None,
            size: texsize,
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: output_format,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
            view_formats: &[],
        });
        let view = output.create_view(&wgpu::TextureViewDescriptor::default());
        // HDR target the tracer draws sub-frames into before presentation.
        let hdr = device.create_texture(&wgpu::TextureDescriptor {
            label: None,
            size: texsize,
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: hdr_format,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
            view_formats: &[],
        });
        let hdr = hdr.create_view(&wgpu::TextureViewDescriptor::default());
        let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
        {
            // Record RAYS_PER_PIXEL traced sub-frames into one render pass.
            // The sub-frame index also advances `time`, so fast motion is
            // integrated across the frame interval.
            let mut render_pass = renderer.prepare(&mut encoder, &hdr);
            for subframe in 0..RAYS_PER_PIXEL {
                let subframe = frame * RAYS_PER_PIXEL + subframe;
                let time = subframe as f32 / (RAYS_PER_PIXEL * FRAME_RATE) as f32;
                renderer.render_frame(&device, &mut render_pass, SIZE, &scene, time, subframe);
            }
        }
        // Resolve the HDR accumulation into the 8-bit output; the divisor
        // accounts for the number of accumulated sub-frames.
        presenter.render(
            &device,
            &mut encoder,
            &hdr,
            &view,
            present::Params {
                divisor: RAYS_PER_PIXEL as f32,
            },
        );
        // Per-frame readback buffer: 4 bytes per RGBA pixel.
        let buffer = device.create_buffer(&wgpu::BufferDescriptor {
            label: None,
            size: (4 * SIZE.x * SIZE.y) as u64,
            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
            mapped_at_creation: false,
        });
        // NOTE(review): wgpu requires bytes_per_row of buffer copies to be
        // 256-byte aligned; 4 * 1920 = 7680 satisfies this, but other SIZE
        // values may not — verify if SIZE changes.
        encoder.copy_texture_to_buffer(
            wgpu::ImageCopyTexture {
                texture: &output,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::ImageCopyBuffer {
                buffer: &buffer,
                layout: wgpu::ImageDataLayout {
                    offset: 0,
                    bytes_per_row: Some(4 * SIZE.x),
                    rows_per_image: Some(SIZE.y),
                },
            },
            texsize,
        );
        queue.submit([encoder.finish()]);
        let buffer = Arc::new(buffer);
        let img_sender = Arc::clone(&img_sender);
        // Once the copy completes and the buffer is mapped, hand it to the
        // encoder threads; they read the mapped range and then drop the Arc.
        Arc::clone(&buffer)
            .slice(..)
            .map_async(wgpu::MapMode::Read, move |res| {
                res.unwrap();
                img_sender.send_blocking((frame, buffer)).unwrap();
            });
    }
    // Block until all submitted GPU work finishes so every pending map_async
    // callback fires (and sends its frame) before the sender is dropped.
    device.poll(wgpu::Maintain::Wait);
}
/// Creates a headless wgpu device/queue (no surface) with push-constant
/// support sized for the tracer's pipelines.
async fn init_gpu() -> Result<(wgpu::Device, wgpu::Queue), Box<dyn Error>> {
    let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
        backends: wgpu::Backends::PRIMARY,
        ..Default::default()
    });
    // Headless: no compatible surface is requested.
    let adapter_options = wgpu::RequestAdapterOptions {
        power_preference: wgpu::PowerPreference::default(),
        compatible_surface: None,
        force_fallback_adapter: false,
    };
    let adapter = instance.request_adapter(&adapter_options).await.unwrap();
    let limits = wgpu::Limits {
        max_push_constant_size: 128,
        ..Default::default()
    };
    let descriptor = wgpu::DeviceDescriptor {
        label: None,
        required_features: wgpu::Features::PUSH_CONSTANTS,
        required_limits: limits,
        memory_hints: Default::default(),
    };
    let (device, queue) = adapter.request_device(&descriptor, None).await.unwrap();
    Ok((device, queue))
}

View File

@ -1 +1,6 @@
pub mod anim;
pub mod perlin; pub mod perlin;
pub mod present;
pub mod scene;
pub mod trace;
pub use trace::Sphere;

View File

@ -86,7 +86,7 @@ impl Presenter {
} }
} }
pub fn render( fn render_internal(
&self, &self,
device: &wgpu::Device, device: &wgpu::Device,
pass: &mut wgpu::RenderPass, pass: &mut wgpu::RenderPass,
@ -121,4 +121,29 @@ impl Presenter {
pass.set_bind_group(0, &bindings, &[]); pass.set_bind_group(0, &bindings, &[]);
pass.draw(0..4, 0..1); pass.draw(0..4, 0..1);
} }
/// Convenience wrapper: begins a fresh render pass over `target` (cleared to
/// transparent) and draws `source` into it via `render_internal`, forwarding
/// `params` to the draw.
pub fn render(
    &self,
    device: &wgpu::Device,
    encoder: &mut wgpu::CommandEncoder,
    source: &wgpu::TextureView,
    target: &wgpu::TextureView,
    params: Params,
) {
    // Single color attachment: clear, then store the full-screen draw.
    let attachment = wgpu::RenderPassColorAttachment {
        view: target,
        resolve_target: None,
        ops: wgpu::Operations {
            load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT),
            store: wgpu::StoreOp::Store,
        },
    };
    let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
        label: None,
        color_attachments: &[Some(attachment)],
        depth_stencil_attachment: None,
        occlusion_query_set: None,
        timestamp_writes: None,
    });
    self.render_internal(device, &mut pass, source, params);
}
} }

206
src/scene.rs Normal file
View File

@ -0,0 +1,206 @@
use crate::anim::{self, SphereParams};
use crate::trace::{self, Tracer, TracerData, TracerEnv};
use glam::{mat3, uvec2, vec3, UVec2, Vec3};
use image::ImageReader;
use std::f32::consts::PI;
/// Camera pose given as an eye point plus forward/right direction hints.
/// The hints need not be orthonormal; `convert_location` orthonormalizes them.
struct CamLoc {
    eye: Vec3,
    forward: Vec3,
    right: Vec3,
}
/// Builds a viewport whose corner direction matches the `w` x `h` aspect
/// ratio, with the xy part normalized to unit length and z fixed at 1.
fn make_viewport(w: u32, h: u32) -> trace::Viewport {
    let aspect = uvec2(w, h).as_vec2().normalize();
    trace::Viewport {
        corner: vec3(aspect.x, aspect.y, 1.),
    }
}
/// Orthonormalizes a `CamLoc` into the tracer's camera form: `up` comes from
/// the right hint crossed with forward, then `right` is re-derived so the
/// basis is orthonormal even when the hints are not perpendicular.
fn convert_location(cam: CamLoc) -> trace::CameraLocation {
    let forward = cam.forward.normalize();
    let up = cam.right.cross(forward).normalize();
    let right = up.cross(forward).normalize();
    trace::CameraLocation {
        eye: cam.eye,
        view: mat3(right, up, forward),
    }
}
// Sphere count used by `SceneParams::default`.
const N_SPHERES: u32 = 100;
// Time offset (applied as a phase shift) by which the camera's look-at target
// trails sphere 0's animation.
const CAMERA_LAG: f32 = 0.03;
/// GPU-side scene renderer: the ray-tracing pipeline plus its environment-map
/// bindings.
pub struct Renderer {
    tracer: Tracer,
    env: TracerEnv,
}
impl Renderer {
    /// Creates the tracer pipeline targeting the `Rgba16Float` HDR format and
    /// binds the given cube-map view as its environment.
    pub fn new(device: &wgpu::Device, env: wgpu::TextureView) -> Self {
        let tracer = Tracer::new(device, wgpu::TextureFormat::Rgba16Float);
        let env = TracerEnv::new(device, &tracer, &env);
        Self { tracer, env }
    }
}
/// CPU-side animation parameters describing the whole scene.
pub struct SceneParams {
    /// Animated spheres rendered by the tracer.
    pub spheres: Vec<SphereParams>,
    /// Animation path of the camera eye point.
    pub camera: SphereParams,
    /// Animation path of the camera's look-at target (trails sphere 0).
    pub target: SphereParams,
}
impl SceneParams {
    /// Builds a deterministic scene (fixed PCG seed 42) of `n_spheres`
    /// animated spheres, a camera path, and a target that trails sphere 0.
    pub fn new(n_spheres: u32) -> Self {
        let mut rng = rand_pcg::Pcg32::new(42, 0);
        // Sample all sphere parameters first so RNG consumption order is fixed.
        let distr = anim::distr();
        let spheres: Vec<_> = (0..n_spheres).map(|_| distr(&mut rng)).collect();
        // Camera: wider amplitude, slower frequency than a regular sphere.
        let mut camera = anim::distr()(&mut rng);
        camera.amplitudes *= 2.0;
        camera.frequencies *= 0.1;
        // Target: sphere 0's path delayed by CAMERA_LAG via a phase shift.
        let mut target = spheres[0];
        target.phases -= 2. * PI * CAMERA_LAG * target.frequencies;
        Self {
            spheres,
            camera,
            target,
        }
    }
}
impl Default for SceneParams {
fn default() -> Self {
Self::new(N_SPHERES)
}
}
/// Opaque handle over the tracer's render pass, so callers go through
/// `Renderer::prepare` / `Renderer::render_frame` instead of raw wgpu passes.
pub struct RenderPass<'encoder>(wgpu::RenderPass<'encoder>);
impl Renderer {
    /// Begins the HDR render pass targeting `target`; hand the returned pass
    /// to `render_frame` once per sub-frame before dropping it.
    pub fn prepare<'encoder>(
        &self,
        encoder: &'encoder mut wgpu::CommandEncoder,
        target: &wgpu::TextureView,
    ) -> RenderPass<'encoder> {
        RenderPass(self.tracer.prepare(encoder, target))
    }

    /// Records one traced sub-frame into `render_pass`.
    ///
    /// * `size` — output resolution in pixels.
    /// * `time` — animation time at which sphere/camera positions are evaluated.
    /// * `seed` — per-sub-frame RNG seed so accumulated rays decorrelate.
    pub fn render_frame(
        &self,
        device: &wgpu::Device,
        render_pass: &mut RenderPass,
        size: UVec2,
        scene: &SceneParams,
        time: f32,
        seed: u32,
    ) {
        let target = scene.target.to_sphere(time).center;
        let eye = scene.camera.to_sphere(time).center;
        // The camera path's velocity serves as the "right" hint;
        // convert_location orthonormalizes it against the view direction.
        let right = scene.camera.deriv(time);
        let forward = target - eye;
        let viewport = make_viewport(size.x, size.y);
        let location = convert_location(CamLoc { eye, forward, right });
        let spheres: Vec<_> = scene.spheres.iter().map(|p| p.to_sphere(time)).collect();
        let data = TracerData::new(device, &self.tracer, &spheres);
        self.tracer.render(
            &mut render_pass.0,
            &data,
            &self.env,
            trace::Params {
                max_reflections: 3,
                min_strength: 0.1,
                // BUGFIX: use the actual scene's sphere count, not the
                // module-level default — SceneParams::new accepts an
                // arbitrary count, and the GPU buffer holds exactly
                // `spheres.len()` entries.
                sphere_count: spheres.len() as u32,
                seed,
            },
            viewport,
            trace::Aperture {
                radius: 0.001,
                // Keep the look-at target in focus.
                focal_distance: forward.length(),
                glare_strength: 0.1,
                glare_radius: 0.1,
            },
            location,
        );
    }
}
/// Loads the six cube-map faces from `textures/env{0..5}.webp` (decoded on
/// parallel threads), uploads them into a 6-layer RGBA8 sRGB texture, and
/// returns a cube-dimension view for environment sampling.
pub fn load_envmap(device: &wgpu::Device, queue: &wgpu::Queue) -> wgpu::TextureView {
    // Decode all six faces concurrently; `map`+`join` preserves face order.
    let imgs = std::thread::scope(|s| {
        [0, 1, 2, 3, 4, 5]
            .map(|face| {
                s.spawn(move || {
                    let img = ImageReader::open(format!("textures/env{face}.webp"))
                        .unwrap()
                        .with_guessed_format()
                        .unwrap()
                        .decode()
                        .unwrap();
                    img.to_rgba8()
                })
            })
            .map(|t| t.join().unwrap())
    });
    // Cube faces must all be square and the same size.
    let size = imgs[0].width();
    for img in &imgs {
        assert!(img.width() == size);
        assert!(img.height() == size);
    }
    let texture = device.create_texture(&wgpu::TextureDescriptor {
        label: None,
        size: wgpu::Extent3d {
            width: size,
            height: size,
            depth_or_array_layers: 6, // one array layer per cube face
        },
        mip_level_count: 1,
        sample_count: 1,
        dimension: wgpu::TextureDimension::D2,
        format: wgpu::TextureFormat::Rgba8UnormSrgb,
        usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
        view_formats: &[],
    });
    // Upload each face into its array layer (origin.z selects the layer).
    for (face, img) in imgs.iter().enumerate() {
        queue.write_texture(
            wgpu::ImageCopyTexture {
                texture: &texture,
                mip_level: 0,
                origin: wgpu::Origin3d {
                    x: 0,
                    y: 0,
                    z: face as u32,
                },
                aspect: wgpu::TextureAspect::All,
            },
            img.as_raw(),
            wgpu::ImageDataLayout {
                offset: 0,
                bytes_per_row: Some(4 * size), // tightly packed RGBA8 rows
                rows_per_image: Some(size),
            },
            wgpu::Extent3d {
                width: size,
                height: size,
                depth_or_array_layers: 1,
            },
        );
    }
    // View the 6-layer 2D texture as a cube map.
    texture.create_view(&wgpu::TextureViewDescriptor {
        label: None,
        format: None,
        dimension: Some(wgpu::TextureViewDimension::Cube),
        aspect: wgpu::TextureAspect::All,
        base_mip_level: 0,
        mip_level_count: None,
        base_array_layer: 0,
        array_layer_count: None,
    })
}

View File

@ -233,6 +233,27 @@ impl Tracer {
Self { view_buf, pipeline } Self { view_buf, pipeline }
} }
/// Begins a render pass over `target`, clearing it to transparent black;
/// subsequent `render` calls draw into the returned pass.
pub fn prepare<'encoder>(
    &self,
    encoder: &'encoder mut wgpu::CommandEncoder,
    target: &wgpu::TextureView,
) -> wgpu::RenderPass<'encoder> {
    let color = wgpu::RenderPassColorAttachment {
        view: target,
        resolve_target: None,
        ops: wgpu::Operations {
            load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT),
            store: wgpu::StoreOp::Store,
        },
    };
    encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
        label: None,
        color_attachments: &[Some(color)],
        depth_stencil_attachment: None,
        occlusion_query_set: None,
        timestamp_writes: None,
    })
}
pub fn render( pub fn render(
&self, &self,
pass: &mut wgpu::RenderPass, pass: &mut wgpu::RenderPass,