Add recording program

This commit is contained in:
numzero 2024-12-30 23:40:26 +03:00
parent b5e4b35c6e
commit 453a67a93f
3 changed files with 225 additions and 0 deletions

52
Cargo.lock generated
View File

@ -100,6 +100,18 @@ dependencies = [
"libloading",
]
[[package]]
name = "async-channel"
version = "2.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a"
dependencies = [
"concurrent-queue",
"event-listener-strategy",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
@ -401,6 +413,27 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "event-listener"
version = "5.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba"
dependencies = [
"concurrent-queue",
"parking",
"pin-project-lite",
]
[[package]]
name = "event-listener-strategy"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
dependencies = [
"event-listener",
"pin-project-lite",
]
[[package]]
name = "fdeflate"
version = "0.3.7"
@ -453,6 +486,12 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
[[package]]
name = "futures-core"
version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
[[package]]
name = "gethostname"
version = "0.4.3"
@ -1117,6 +1156,12 @@ dependencies = [
"ttf-parser",
]
[[package]]
name = "parking"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
[[package]]
name = "parking_lot"
version = "0.12.3"
@ -1257,6 +1302,12 @@ version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afbdc74edc00b6f6a218ca6a5364d6226a259d4b8ea1af4a0ea063f27e179f4d"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quick-xml"
version = "0.36.2"
@ -1340,6 +1391,7 @@ checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539"
name = "raytracing3"
version = "0.1.0"
dependencies = [
"async-channel",
"bytemuck",
"glam",
"image",

View File

@ -14,6 +14,7 @@ opt-level = 3
opt-level = 3
[dependencies]
async-channel = "2.3.1"
bytemuck = { version = "1.21.0", features = ["derive"] }
glam = { version = "0.29.2", features = ["bytemuck"] }
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "webp"] }

172
src/bin/rec.rs Normal file
View File

@ -0,0 +1,172 @@
use std::env::args;
use std::error::Error;
use std::fs;
use std::path::PathBuf;
use std::sync::Arc;
use glam::{uvec2, UVec2};
use image::buffer::ConvertBuffer;
use raytracing3::present::{self, Presenter};
use raytracing3::scene::{load_envmap, Renderer, SceneParams};
// Output resolution of every rendered frame, in pixels (width, height).
const SIZE: UVec2 = uvec2(1920, 1080);
// Playback rate of the recording, frames per second.
const FRAME_RATE: u32 = 60;
// Total length of the recording, in seconds.
const DURATION_SECONDS: u32 = 120;
// Total number of frames rendered for the whole recording.
const N_FRAMES: u32 = DURATION_SECONDS * FRAME_RATE;
// Sphere count handed to SceneParams::new when building the scene.
const N_SPHERES: u32 = 100;
// Ray samples accumulated per pixel for each output frame; each sample is
// rendered as its own "subframe" with a slightly advanced time value
// (see do_work), which presumably also yields motion blur — TODO confirm.
const RAYS_PER_PIXEL: u32 = 1024;
/// Entry point: renders the full recording and writes each frame as a WebP
/// image (`frame000000.webp`, `frame000001.webp`, …) into the output
/// directory given as the single command-line argument.
fn main() {
    let args: Vec<_> = args().collect();
    let [_, path] = args.as_slice() else {
        // Tell the user what was expected instead of a bare "invalid arguments".
        panic!("usage: rec <output-dir>");
    };
    let path: PathBuf = path.into();
    // create_dir (not create_dir_all) fails if the directory already exists —
    // presumably intentional, to avoid mixing frames from a previous run.
    fs::create_dir(&path).expect("failed to create the output directory");
    std::thread::scope(|s| {
        // Bounded channel: the GPU producer cannot run more than 50 frames
        // ahead of the (slower) image-encoding workers.
        let (img_sender, img_receiver) = async_channel::bounded::<(u32, Arc<wgpu::Buffer>)>(50);
        // Pool of encoder threads; each pulls (frame index, readback buffer)
        // pairs until the sender side is dropped and the channel closes.
        for _ in 0..16 {
            let img_receiver = img_receiver.clone();
            let path = &path;
            s.spawn(move || {
                while let Ok((frame, buffer)) = img_receiver.recv_blocking() {
                    // The buffer was mapped by the map_async callback before it
                    // was sent, so reading the mapped range here is valid.
                    let img = image::RgbaImage::from_raw(SIZE.x, SIZE.y, buffer.slice(..).get_mapped_range().to_vec())
                        .expect("read failure!");
                    // Drop the alpha channel before encoding.
                    let img: image::RgbImage = img.convert();
                    img.save(path.join(format!("frame{frame:06}.webp")))
                        .expect("save failure!");
                }
            });
        }
        // Render on the current thread; returns once every frame's buffer has
        // been handed to the channel, after which the workers drain and exit.
        do_work(img_sender);
    });
}
/// Renders all `N_FRAMES` frames on the GPU. For each frame it accumulates
/// `RAYS_PER_PIXEL` subframes into an HDR target, tone-maps/averages into an
/// sRGB output texture, copies that into a mappable readback buffer, and
/// sends the mapped buffer through `img_sender` for encoding on CPU threads.
fn do_work(img_sender: async_channel::Sender<(u32, Arc<wgpu::Buffer>)>) {
    // Wrapped in Arc so each frame's map_async callback can own a handle;
    // the channel closes once this and every callback clone are dropped.
    let img_sender = Arc::new(img_sender);
    let (device, queue) = pollster::block_on(init_gpu()).unwrap();
    let envmap = load_envmap(&device, &queue);
    // Empty submit — NOTE(review): presumably flushes staging work recorded by
    // load_envmap via the queue; confirm it is actually required.
    queue.submit([]);
    let texsize = wgpu::Extent3d {
        width: SIZE.x,
        height: SIZE.y,
        depth_or_array_layers: 1,
    };
    let output_format = wgpu::TextureFormat::Rgba8UnormSrgb;
    let hdr_format = wgpu::TextureFormat::Rgba16Float;
    let scene = SceneParams::new(N_SPHERES);
    let renderer = Renderer::new(&device, envmap);
    let presenter = Presenter::new(&device, output_format);
    for frame in 0..N_FRAMES {
        // 8-bit sRGB texture the presenter resolves into; COPY_SRC so it can
        // be copied out to the readback buffer below.
        let output = device.create_texture(&wgpu::TextureDescriptor {
            label: None,
            size: texsize,
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: output_format,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC,
            view_formats: &[],
        });
        let view = output.create_view(&wgpu::TextureViewDescriptor::default());
        // Float16 accumulation target: render target for the subframe passes,
        // then sampled (TEXTURE_BINDING) by the presenter.
        let hdr = device.create_texture(&wgpu::TextureDescriptor {
            label: None,
            size: texsize,
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: hdr_format,
            usage: wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING,
            view_formats: &[],
        });
        let hdr = hdr.create_view(&wgpu::TextureViewDescriptor::default());
        let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });
        {
            // One render pass accumulating every subframe of this frame.
            let mut render_pass = renderer.prepare(&mut encoder, &hdr);
            for subframe in 0..RAYS_PER_PIXEL {
                // Global subframe index; also seeds per-sample variation.
                let subframe = frame * RAYS_PER_PIXEL + subframe;
                // Time advances a little per subframe (not just per frame),
                // which presumably produces motion blur — TODO confirm.
                let time = subframe as f32 / (RAYS_PER_PIXEL * FRAME_RATE) as f32;
                renderer.render_frame(&device, &mut render_pass, SIZE, &scene, time, subframe);
            }
        }
        // Resolve the HDR accumulation into the 8-bit output, dividing by the
        // sample count to average the accumulated radiance.
        presenter.render(
            &device,
            &mut encoder,
            &hdr,
            &view,
            present::Params {
                divisor: RAYS_PER_PIXEL as f32,
            },
        );
        // CPU-readable staging buffer, 4 bytes (RGBA8) per pixel.
        let buffer = device.create_buffer(&wgpu::BufferDescriptor {
            label: None,
            size: (4 * SIZE.x * SIZE.y) as u64,
            usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::MAP_READ,
            mapped_at_creation: false,
        });
        // bytes_per_row = 4 * 1920 = 7680, a multiple of wgpu's required
        // 256-byte row alignment, so no padding rows are needed.
        encoder.copy_texture_to_buffer(
            wgpu::ImageCopyTexture {
                texture: &output,
                mip_level: 0,
                origin: wgpu::Origin3d::ZERO,
                aspect: wgpu::TextureAspect::All,
            },
            wgpu::ImageCopyBuffer {
                buffer: &buffer,
                layout: wgpu::ImageDataLayout {
                    offset: 0,
                    bytes_per_row: Some(4 * SIZE.x),
                    rows_per_image: Some(SIZE.y),
                },
            },
            texsize,
        );
        queue.submit([encoder.finish()]);
        let buffer = Arc::new(buffer);
        let img_sender = Arc::clone(&img_sender);
        // map_async only schedules the map; the callback fires when the device
        // is maintained. Once mapped, ship the buffer to an encoder thread.
        // NOTE(review): the only explicit poll(Wait) is after the loop —
        // presumably earlier callbacks fire as a side effect of the per-frame
        // queue.submit above; verify buffers don't all stay unmapped (and
        // resident) until the very end.
        Arc::clone(&buffer)
            .slice(..)
            .map_async(wgpu::MapMode::Read, move |res| {
                res.unwrap();
                img_sender.send_blocking((frame, buffer)).unwrap();
            });
    }
    // Block until all outstanding GPU work finishes and every remaining
    // map_async callback has run, so no frame is lost when we return.
    device.poll(wgpu::Maintain::Wait);
}
/// Initializes a headless wgpu device/queue pair on a primary-backend adapter.
///
/// Requests the `PUSH_CONSTANTS` feature with a 128-byte push-constant limit,
/// which the renderer relies on.
///
/// # Errors
/// Returns an error when no suitable adapter is found or when the device
/// request fails. (Previously both cases were `unwrap()`ed, panicking despite
/// the `Result` signature; errors are now propagated to the caller.)
async fn init_gpu() -> Result<(wgpu::Device, wgpu::Queue), Box<dyn Error>> {
    let instance = wgpu::Instance::new(wgpu::InstanceDescriptor {
        backends: wgpu::Backends::PRIMARY,
        ..Default::default()
    });
    // No surface: frames are read back to the CPU rather than presented.
    let adapter = instance
        .request_adapter(&wgpu::RequestAdapterOptions {
            power_preference: wgpu::PowerPreference::default(),
            compatible_surface: None,
            force_fallback_adapter: false,
        })
        .await
        .ok_or("no suitable GPU adapter found")?;
    let (device, queue) = adapter
        .request_device(
            &wgpu::DeviceDescriptor {
                label: None,
                required_features: wgpu::Features::PUSH_CONSTANTS,
                required_limits: wgpu::Limits {
                    max_push_constant_size: 128,
                    ..Default::default()
                },
                memory_hints: Default::default(),
            },
            None, // no API trace output
        )
        .await?;
    Ok((device, queue))
}