Improve the docs of PipelineCache and friends (#6978)

This commit is contained in:
Daniel McNab
2025-01-23 10:56:05 +00:00
committed by GitHub
parent e86ed8b6c8
commit 5e2bcc9191
4 changed files with 35 additions and 9 deletions

View File

@@ -83,6 +83,10 @@ By @brodycj in [#6925](https://github.com/gfx-rs/wgpu/pull/6925).
- Stop naga causing undefined behavior when a ray query misses. By @Vecvec in [#6752](https://github.com/gfx-rs/wgpu/pull/6752).
### Documentation
- Improved documentation around pipeline caches. By @DJMcNab in [#6978](https://github.com/gfx-rs/wgpu/pull/6978).
## v24.0.0 (2025-01-15)
### Major changes

View File

@@ -40,7 +40,7 @@ impl Default for PipelineCompilationOptions<'_> {
/// Describes a pipeline cache, which allows reusing compilation work
/// between program runs.
///
/// For use with [`Device::create_pipeline_cache`]
/// For use with [`Device::create_pipeline_cache`].
///
/// This type is unique to the Rust API of `wgpu`.
#[derive(Clone, Debug)]

View File

@@ -5,7 +5,9 @@ use crate::*;
/// in subsequent executions
///
/// This reuse is only applicable for the same or similar devices.
/// See [`util::pipeline_cache_key`] for some details.
/// See [`util::pipeline_cache_key`] for some details and a suggested workflow.
///
/// Created using [`Device::create_pipeline_cache`].
///
/// # Background
///
@@ -28,6 +30,7 @@ use crate::*;
///
/// # Usage
///
/// This is used as [`RenderPipelineDescriptor::cache`] or [`ComputePipelineDescriptor::cache`].
/// It is valid to use this resource when creating multiple pipelines, in
/// which case it will likely cache each of those pipelines.
/// It is also valid to create a new cache for each pipeline.

View File

@@ -155,16 +155,35 @@ impl std::ops::Deref for DownloadBuffer {
///
/// # Examples
///
/// ``` no_run
/// ```no_run
/// # use std::path::PathBuf;
/// use wgpu::PipelineCacheDescriptor;
/// # let adapter_info = todo!();
/// let cache_dir: PathBuf = PathBuf::new();
/// # let device: wgpu::Device = todo!();
/// let cache_dir: PathBuf = unimplemented!("Some reasonable platform-specific cache directory for your app.");
/// let filename = wgpu::util::pipeline_cache_key(&adapter_info);
/// if let Some(filename) = filename {
/// let cache_file = cache_dir.join(&filename);
/// let cache_data = std::fs::read(&cache_file);
/// let pipeline_cache: wgpu::PipelineCache = todo!("Use data (if present) to create a pipeline cache");
/// let (pipeline_cache, cache_file) = if let Some(filename) = filename {
/// let cache_path = cache_dir.join(&filename);
/// // If we failed to read the cache, for whatever reason, treat the data as lost.
/// // In a real app, we'd probably avoid caching entirely unless the error was "file not found".
/// let cache_data = std::fs::read(&cache_path).ok();
/// let pipeline_cache = unsafe {
/// device.create_pipeline_cache(&PipelineCacheDescriptor {
/// data: cache_data.as_deref(),
/// label: None,
/// fallback: true
/// })
/// };
/// (Some(pipeline_cache), Some(cache_path))
/// } else {
/// (None, None)
/// };
///
/// // Run pipeline initialisation, making sure to set the `cache`
/// // field of each of your `*PipelineDescriptor`s to `pipeline_cache`
///
/// // And then save the resulting cache (probably off the main thread).
/// if let (Some(pipeline_cache), Some(cache_file)) = (pipeline_cache, cache_file) {
/// let data = pipeline_cache.get_data();
/// if let Some(data) = data {
/// let temp_file = cache_file.with_extension("temp");
@@ -172,7 +191,7 @@ impl std::ops::Deref for DownloadBuffer {
/// std::fs::rename(&temp_file, &cache_file)?;
/// }
/// }
/// # Ok::<(), std::io::Error>(())
/// # Ok::<_, std::io::Error>(())
/// ```
///
/// [`PipelineCache`]: super::PipelineCache