Move most code to the lib

This commit is contained in:
numzero 2024-12-30 21:36:18 +03:00
parent 36e0ea9432
commit 7ec4a1480e
4 changed files with 219 additions and 148 deletions

View File

@ -1,6 +1,7 @@
use glam::{vec3, Vec3};
use rand_distr::{Bernoulli, Distribution, Uniform};
#[derive(Debug, Clone, Copy)]
pub struct SphereParams {
pub radius: f32,
pub alpha: f32,

View File

@ -1,40 +1,14 @@
use std::error::Error;
use glam::{mat3, uvec2, vec3, Vec3};
use image::ImageReader;
use raytracing3::anim;
use glam::uvec2;
use raytracing3::present::{self, Presenter};
use raytracing3::trace::{self, Tracer, TracerData, TracerEnv};
use raytracing3::scene::{load_envmap, Renderer, SceneParams};
use winit::{
event::{Event, WindowEvent},
event_loop::EventLoop,
window::{Window, WindowAttributes},
};
struct CamLoc {
eye: Vec3,
forward: Vec3,
right: Vec3,
}
fn make_viewport(w: u32, h: u32) -> trace::Viewport {
let size = uvec2(w, h).as_vec2();
let size = size.normalize();
trace::Viewport {
corner: vec3(size.x, size.y, 1.),
}
}
fn convert_location(cam: CamLoc) -> trace::CameraLocation {
let fwd = cam.forward.normalize();
let up = cam.right.cross(fwd).normalize();
let right = up.cross(fwd).normalize();
trace::CameraLocation {
eye: cam.eye,
view: mat3(right, up, fwd),
}
}
const N_SPHERES: u32 = 100;
const RAYS_PER_PIXEL: u32 = 4;
@ -52,27 +26,8 @@ fn main() {
let output_format = wgpu::TextureFormat::Bgra8UnormSrgb;
let hdr_format = wgpu::TextureFormat::Rgba16Float;
let tracer = Tracer::new(&device, hdr_format);
let mut rng = rand_pcg::Pcg32::new(42, 0);
let sphere_params: Vec<_> = {
let distr = anim::distr();
(0..N_SPHERES).map(|_| distr(&mut rng)).collect()
};
let camera_params = {
let mut p = anim::distr()(&mut rng);
p.amplitudes *= 2.0;
p.frequencies *= 0.1;
p
};
let target_params = {
let mut p = anim::distr()(&mut rng);
p.origin = Vec3::splat(0.0);
p.amplitudes *= 0.5;
p.frequencies *= 0.1;
p
};
let tracer_env = TracerEnv::new(&device, &tracer, &envmap);
let scene = SceneParams::new(N_SPHERES);
let renderer = Renderer::new(&device, envmap);
let presenter = Presenter::new(&device, output_format);
let mut frame = 0;
@ -121,36 +76,17 @@ fn main() {
let hdr = hdr.create_view(&wgpu::TextureViewDescriptor::default());
let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
{
let mut render_pass = tracer.prepare(&mut encoder, &hdr);
let mut render_pass = renderer.prepare(&mut encoder, &hdr);
for _ in 0..RAYS_PER_PIXEL {
frame += 1;
let time = frame as f32 / (60. * RAYS_PER_PIXEL as f32);
let target = sphere_params[0].to_sphere(time - 0.2).center;
let eye = camera_params.to_sphere(time).center;
let right = camera_params.deriv(time);
let forward = target - eye;
let viewport = make_viewport(size.width, size.height);
let location = convert_location(CamLoc { eye, forward, right });
let spheres: Vec<_> = sphere_params.iter().map(|p| p.to_sphere(time)).collect();
let data = TracerData::new(&device, &tracer, &spheres);
tracer.render(
renderer.render_frame(
&device,
&mut render_pass,
&data,
&tracer_env,
trace::Params {
max_reflections: 3,
min_strength: 0.1,
sphere_count: N_SPHERES,
seed: frame,
},
viewport,
trace::Aperture {
radius: 0.001,
focal_distance: forward.length(),
glare_strength: 0.1,
glare_radius: 0.1,
},
location,
uvec2(size.width, size.height),
&scene,
time,
frame,
);
}
}
@ -204,75 +140,3 @@ async fn init_gpu(wnd: &Window) -> Result<(wgpu::Device, wgpu::Queue, wgpu::Surf
.unwrap();
Ok((device, queue, surface))
}
/// Loads the six cube-map faces from `textures/env{0..5}.jpeg`, uploads
/// them into a 6-layer 2D texture, and returns a cube-dimension view.
///
/// NOTE(review): panics (`unwrap`) if any file is missing or undecodable —
/// acceptable for an app asset loader, but callers get no recovery path.
fn load_envmap(device: &wgpu::Device, queue: &wgpu::Queue) -> wgpu::TextureView {
// Decode all six faces in parallel; scoped threads let the closures
// borrow from this stack frame without 'static bounds.
let imgs = std::thread::scope(|s| {
[0, 1, 2, 3, 4, 5]
.map(|face| {
s.spawn(move || {
let img = ImageReader::open(format!("textures/env{face}.jpeg"))
.unwrap()
.with_guessed_format()
.unwrap()
.decode()
.unwrap();
img.to_rgba8()
})
})
.map(|t| t.join().unwrap())
});
// Cube-map faces must all be square and identically sized.
let size = imgs[0].width();
for img in &imgs {
assert!(img.width() == size);
assert!(img.height() == size);
}
// One 2D texture with 6 array layers — the layout wgpu expects for cube views.
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: None,
size: wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 6,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
// Upload each face into its array layer (the z origin selects the layer).
for (face, img) in imgs.iter().enumerate() {
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d {
x: 0,
y: 0,
z: face as u32,
},
aspect: wgpu::TextureAspect::All,
},
img.as_raw(),
wgpu::ImageDataLayout {
offset: 0,
// Tightly packed RGBA8 rows: 4 bytes per texel.
bytes_per_row: Some(4 * size),
rows_per_image: Some(size),
},
wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 1,
},
);
}
// View the 6 layers as a cube map for shader sampling.
texture.create_view(&wgpu::TextureViewDescriptor {
label: None,
format: None,
dimension: Some(wgpu::TextureViewDimension::Cube),
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
})
}

View File

@ -1,6 +1,6 @@
pub mod anim;
pub mod perlin;
pub mod present;
pub mod scene;
pub mod trace;
pub use trace::Sphere;

206
src/scene.rs Normal file
View File

@ -0,0 +1,206 @@
use crate::anim::{self, SphereParams};
use crate::trace::{self, Tracer, TracerData, TracerEnv};
use glam::{mat3, uvec2, vec3, UVec2, Vec3};
use image::ImageReader;
use std::f32::consts::PI;
/// Raw camera placement hint. `forward` and `right` need not be orthogonal
/// or unit-length; `convert_location` re-orthogonalizes them into a basis.
struct CamLoc {
// Camera position in world space.
eye: Vec3,
// Direction the camera should look toward (un-normalized).
forward: Vec3,
// Hint for the horizontal axis (un-normalized, may be skew).
right: Vec3,
}
/// Builds the tracer viewport for a `w`×`h` output image.
///
/// The aspect ratio is encoded by normalizing the (w, h) vector; the image
/// plane sits at z = 1 in camera space.
fn make_viewport(w: u32, h: u32) -> trace::Viewport {
    let aspect = uvec2(w, h).as_vec2().normalize();
    trace::Viewport {
        corner: vec3(aspect.x, aspect.y, 1.),
    }
}
fn convert_location(cam: CamLoc) -> trace::CameraLocation {
let fwd = cam.forward.normalize();
let up = cam.right.cross(fwd).normalize();
let right = up.cross(fwd).normalize();
trace::CameraLocation {
eye: cam.eye,
view: mat3(right, up, fwd),
}
}
// Default number of animated spheres in a scene (used by `Default`).
const N_SPHERES: u32 = 100;
// Fraction of an oscillation period by which the camera's target trails
// sphere 0 (applied as a phase shift in `SceneParams::new`).
const CAMERA_LAG: f32 = 0.03;
/// GPU path-tracing renderer: owns the ray-tracer pipeline and the
/// environment-map bindings it samples.
pub struct Renderer {
// Ray-tracing pipeline targeting an HDR color attachment.
tracer: Tracer,
// Bind group for the environment cubemap.
env: TracerEnv,
}
impl Renderer {
    /// Creates a renderer that traces into an `Rgba16Float` HDR target and
    /// samples `env` (a cubemap view, e.g. from [`load_envmap`]) for rays
    /// that miss the scene.
    pub fn new(device: &wgpu::Device, env: wgpu::TextureView) -> Self {
        // HDR intermediate format so tone mapping can happen in a later pass.
        let hdr_format = wgpu::TextureFormat::Rgba16Float;
        // `device` is already `&wgpu::Device`; writing `&device` created a
        // needless double borrow (clippy::needless_borrow).
        let tracer = Tracer::new(device, hdr_format);
        let env = TracerEnv::new(device, &tracer, &env);
        Self { tracer, env }
    }
}
/// Animation parameters for a whole scene: every sphere's motion plus the
/// camera's own path and the path of the point it looks at.
pub struct SceneParams {
// Per-sphere animation parameters.
pub spheres: Vec<SphereParams>,
// Animation path of the camera eye point.
pub camera: SphereParams,
// Animation path of the camera's look-at target.
pub target: SphereParams,
}
impl SceneParams {
    /// Builds a randomized scene with `n_spheres` animated spheres, a wide
    /// slow camera path, and a target path that trails the first sphere by
    /// `CAMERA_LAG` of an oscillation period.
    ///
    /// Uses a fixed PCG seed, so the generated scene is deterministic.
    ///
    /// # Panics
    /// Panics if `n_spheres == 0`: the camera target is derived from
    /// `spheres[0]`, so an empty scene has no defined target. (Previously
    /// this surfaced as an opaque index-out-of-bounds panic.)
    pub fn new(n_spheres: u32) -> Self {
        assert!(n_spheres > 0, "SceneParams requires at least one sphere");
        let mut rng = rand_pcg::Pcg32::new(42, 0);
        let spheres: Vec<_> = {
            let distr = anim::distr();
            (0..n_spheres).map(|_| distr(&mut rng)).collect()
        };
        let camera = {
            // Camera swings wider (2x amplitude) and slower (0.1x frequency)
            // than the spheres it films.
            let mut p = anim::distr()(&mut rng);
            p.amplitudes *= 2.0;
            p.frequencies *= 0.1;
            p
        };
        let target = {
            // Look where sphere 0 was CAMERA_LAG periods ago: shift each
            // oscillator's phase back by that fraction of a full cycle.
            let mut p = spheres[0];
            p.phases -= 2. * PI * CAMERA_LAG * p.frequencies;
            p
        };
        Self {
            spheres,
            camera,
            target,
        }
    }
}
impl Default for SceneParams {
// Default scene uses the crate-level N_SPHERES sphere count.
fn default() -> Self {
Self::new(N_SPHERES)
}
}
pub struct RenderPass<'encoder>(wgpu::RenderPass<'encoder>);
impl Renderer {
    /// Begins a render pass into `target`, wrapped so callers only deal
    /// with this module's [`RenderPass`] type.
    pub fn prepare<'encoder>(
        &self,
        encoder: &'encoder mut wgpu::CommandEncoder,
        target: &wgpu::TextureView,
    ) -> RenderPass<'encoder> {
        RenderPass(self.tracer.prepare(encoder, target))
    }
    /// Renders one frame of `scene` at animation time `time` into the
    /// prepared pass. `size` is the output in pixels; `seed` decorrelates
    /// per-frame ray sampling.
    pub fn render_frame(
        &self,
        device: &wgpu::Device,
        render_pass: &mut RenderPass,
        size: UVec2,
        scene: &SceneParams,
        time: f32,
        seed: u32,
    ) {
        // Camera looks from its animated eye point toward the target path.
        let target = scene.target.to_sphere(time).center;
        let eye = scene.camera.to_sphere(time).center;
        // The path derivative supplies the "right" hint for the basis.
        let right = scene.camera.deriv(time);
        let forward = target - eye;
        let viewport = make_viewport(size.x, size.y);
        let location = convert_location(CamLoc { eye, forward, right });
        // Sample every sphere's animation at this instant.
        let spheres: Vec<_> = scene.spheres.iter().map(|p| p.to_sphere(time)).collect();
        let data = TracerData::new(device, &self.tracer, &spheres);
        self.tracer.render(
            &mut render_pass.0,
            &data,
            &self.env,
            trace::Params {
                max_reflections: 3,
                min_strength: 0.1,
                // BUG FIX: was the global constant N_SPHERES, which silently
                // mismatched any scene built with SceneParams::new(n != 100).
                // Use the scene's actual sphere count.
                sphere_count: spheres.len() as u32,
                seed,
            },
            viewport,
            trace::Aperture {
                radius: 0.001,
                // Keep the (moving) target in focus.
                focal_distance: forward.length(),
                glare_strength: 0.1,
                glare_radius: 0.1,
            },
            location,
        );
    }
}
/// Loads the six cube-map faces from `textures/env{0..5}.jpeg`, uploads
/// them into a 6-layer 2D texture, and returns a cube-dimension view.
///
/// NOTE(review): panics (`unwrap`) if any file is missing or undecodable —
/// acceptable for an app asset loader, but callers get no recovery path.
pub fn load_envmap(device: &wgpu::Device, queue: &wgpu::Queue) -> wgpu::TextureView {
// Decode all six faces in parallel; scoped threads let the closures
// borrow from this stack frame without 'static bounds.
let imgs = std::thread::scope(|s| {
[0, 1, 2, 3, 4, 5]
.map(|face| {
s.spawn(move || {
let img = ImageReader::open(format!("textures/env{face}.jpeg"))
.unwrap()
.with_guessed_format()
.unwrap()
.decode()
.unwrap();
img.to_rgba8()
})
})
.map(|t| t.join().unwrap())
});
// Cube-map faces must all be square and identically sized.
let size = imgs[0].width();
for img in &imgs {
assert!(img.width() == size);
assert!(img.height() == size);
}
// One 2D texture with 6 array layers — the layout wgpu expects for cube views.
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: None,
size: wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 6,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
view_formats: &[],
});
// Upload each face into its array layer (the z origin selects the layer).
for (face, img) in imgs.iter().enumerate() {
queue.write_texture(
wgpu::ImageCopyTexture {
texture: &texture,
mip_level: 0,
origin: wgpu::Origin3d {
x: 0,
y: 0,
z: face as u32,
},
aspect: wgpu::TextureAspect::All,
},
img.as_raw(),
wgpu::ImageDataLayout {
offset: 0,
// Tightly packed RGBA8 rows: 4 bytes per texel.
bytes_per_row: Some(4 * size),
rows_per_image: Some(size),
},
wgpu::Extent3d {
width: size,
height: size,
depth_or_array_layers: 1,
},
);
}
// View the 6 layers as a cube map for shader sampling.
texture.create_view(&wgpu::TextureViewDescriptor {
label: None,
format: None,
dimension: Some(wgpu::TextureViewDimension::Cube),
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
})
}