build: move from make to cargo xtask workflows

Re-implement `naga` development workflows using [`cargo xtask`]. Convert
the `make` logic and the shader test configuration (previously files of
Bash variables) into an `xtask` crate and RON files, respectively.

Pros:

* We now have a _portable_ workflow everywhere, which means Windows
  folks and people who don't install `make` don't have to suffer.
  😮‍💨
* Workflow logic is now relatively easy to inspect and change. Whew!
  💁🏻‍♂️💦
* Contributors can use their existing Rust knowledge to contribute to
  developer experience. 🎉
* `cargo xtask` is a relatively well-known convention for workflows in
  the ecosystem.
* We can do fancy things like allow folks to run at different log levels
  for workflows, depending on their tastes.

Cons:

* There's now a non-trivial compile step to project workflow.
  Incremental rebuilds seem to be pretty short, though!
* Code is much more verbose than the (very) terse `make` implementation.

[`cargo xtask`]: https://github.com/matklad/cargo-xtask
This commit is contained in:
Erich Gubler
2023-03-20 10:42:39 -04:00
committed by Teodor Tanasoaia
parent e8a7e50f0f
commit 91d48b2923
83 changed files with 1389 additions and 277 deletions

2
.cargo/config.toml Normal file
View File

@@ -0,0 +1,2 @@
[alias]
xtask = "run --manifest-path xtask/Cargo.toml --"

View File

@@ -8,6 +8,7 @@ on:
- 'tests/out/dot/*.dot'
- 'tests/out/wgsl/*.wgsl'
- 'src/front/wgsl/*'
- 'xtask/**'
jobs:
validate-linux:
@@ -19,10 +20,15 @@ jobs:
- name: Install tools
run: sudo apt-get install spirv-tools glslang-tools graphviz
- run: make validate-spv
- uses: Swatinem/rust-cache@v2
with:
workspaces: |
xtask -> target
- run: make validate-glsl
- run: cargo xtask validate spv
- run: make validate-dot
- run: cargo xtask validate glsl
- run: make validate-wgsl
- run: cargo xtask validate dot
- run: cargo xtask validate wgsl

View File

@@ -4,6 +4,7 @@ on:
paths:
- '.github/workflows/validation-macos.yml'
- 'tests/out/msl/*.msl'
- 'xtask/**'
jobs:
validate-macos:
@@ -12,4 +13,9 @@ jobs:
steps:
- uses: actions/checkout@v3
- run: make validate-msl
- uses: Swatinem/rust-cache@v2
with:
workspaces: |
xtask -> target
- run: cargo xtask validate msl

View File

@@ -4,6 +4,7 @@ on:
paths:
- '.github/workflows/validation-windows.yml'
- 'tests/out/hlsl/*.hlsl'
- 'xtask/**'
jobs:
validate-windows-dxc:
@@ -15,8 +16,12 @@ jobs:
- name: Add DirectXShaderCompiler
uses: napokue/setup-dxc@v1.1.0
- run: make validate-hlsl-dxc
shell: sh
- uses: Swatinem/rust-cache@v2
with:
workspaces: |
xtask -> target
- run: cargo xtask validate hlsl dxc
validate-windows-fxc:
name: HLSL via FXC
@@ -33,5 +38,9 @@ jobs:
| Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append
shell: powershell
- run: make validate-hlsl-fxc
shell: sh
- uses: Swatinem/rust-cache@v2
with:
workspaces: |
xtask -> target
- run: cargo xtask validate hlsl fxc

View File

@@ -68,6 +68,7 @@ unicode-xid = { version = "0.2.3", optional = true }
bincode = "1"
criterion = { version = "0.3", features = [] }
diff = "0.1"
hlsl-snapshots = { path = "./xtask/hlsl-snapshots"}
# Require at least version 0.7.1 of ron, this version changed how floating points are
# serialized by forcing them to always have the decimal part, this makes it backwards
# incompatible with our tests because we do a syntactic diff and not a semantic one.

131
Makefile
View File

@@ -1,131 +0,0 @@
.PHONY: all clean validate-spv validate-msl validate-glsl validate-dot validate-wgsl validate-hlsl-dxc validate-hlsl-fxc
.SECONDARY: boids.metal quad.metal
SNAPSHOTS_BASE_IN=tests/in
SNAPSHOTS_BASE_OUT=tests/out
all:
cargo fmt
cargo test --all-features --workspace
cargo clippy --all-features --workspace -- -D warnings
clean:
rm *.metal *.air *.metallib *.vert *.frag *.comp *.spv
bench:
#rm -Rf target/criterion
cargo bench
%.metal: $(SNAPSHOTS_BASE_IN)/%.wgsl $(wildcard src/*.rs src/**/*.rs examples/*.rs)
cargo run --features wgsl-in,msl-out -- $< $@
%.air: %.metal
xcrun -sdk macosx metal -c $< -mmacosx-version-min=10.11
%.metallib: %.air
xcrun -sdk macosx metallib $< -o $@
%.dot: $(SNAPSHOTS_BASE_IN)/%.wgsl $(wildcard src/*.rs src/front/wgsl/*.rs src/back/dot/*.rs bin/naga.rs)
cargo run --features wgsl-in,dot-out -- $< $@
%.png: %.dot
dot -Tpng $< -o $@
validate-spv: $(SNAPSHOTS_BASE_OUT)/spv/*.spvasm
@set -e && for file in $^ ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
version_line=$$(head -2 $${file} | tail -1); \
version=$${version_line#"; Version: "};\
cat $${file} | spirv-as --target-env spv$${version} - -o - | spirv-val -; \
done
validate-msl: $(SNAPSHOTS_BASE_OUT)/msl/*.msl
@set -e && for file in $^ ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
header=$$(head -n1 $${file}); \
cat $${file} | xcrun -sdk macosx metal -mmacosx-version-min=10.11 -std=macos-$${header:13:8} -x metal - -o /dev/null; \
done
validate-glsl: $(SNAPSHOTS_BASE_OUT)/glsl/*.glsl
@set -e && for file in $(SNAPSHOTS_BASE_OUT)/glsl/*.Vertex.glsl ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"};\
cat $${file} | glslangValidator --stdin -S vert; \
done
@set -e && for file in $(SNAPSHOTS_BASE_OUT)/glsl/*.Fragment.glsl ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"};\
cat $${file} | glslangValidator --stdin -S frag; \
done
@set -e && for file in $(SNAPSHOTS_BASE_OUT)/glsl/*.Compute.glsl ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"};\
cat $${file} | glslangValidator --stdin -S comp; \
done
validate-dot: $(SNAPSHOTS_BASE_OUT)/dot/*.dot
@set -e && for file in $^ ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
cat $${file} | dot -o /dev/null; \
done
validate-wgsl: $(SNAPSHOTS_BASE_OUT)/wgsl/*.wgsl
@set -e && for file in $^ ; do \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
cargo run $${file}; \
done
validate-hlsl-dxc: SHELL:=/usr/bin/env bash # required because config files uses arrays
validate-hlsl-dxc: $(SNAPSHOTS_BASE_OUT)/hlsl/*.hlsl
@set -e && for file in $^ ; do \
DXC_PARAMS="-Wno-parentheses-equality -Zi -Qembed_debug -Od"; \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
config="$$(dirname $${file})/$$(basename $${file}).config"; \
. $${config}; \
for (( i=0; i<$${#vertex[@]}; i++ )); do \
name=`echo $${vertex[i]} | cut -d \: -f 1`; \
profile=`echo $${vertex[i]} | cut -d \: -f 2`; \
(set -x; dxc $${file} -T $${profile} -E $${name} $${DXC_PARAMS} > /dev/null); \
done; \
for (( i=0; i<$${#fragment[@]}; i++ )); do \
name=`echo $${fragment[i]} | cut -d \: -f 1`; \
profile=`echo $${fragment[i]} | cut -d \: -f 2`; \
(set -x; dxc $${file} -T $${profile} -E $${name} $${DXC_PARAMS} > /dev/null); \
done; \
for (( i=0; i<$${#compute[@]}; i++ )); do \
name=`echo $${compute[i]} | cut -d \: -f 1`; \
profile=`echo $${compute[i]} | cut -d \: -f 2`; \
(set -x; dxc $${file} -T $${profile} -E $${name} $${DXC_PARAMS} > /dev/null); \
done; \
echo "======================"; \
done
validate-hlsl-fxc: SHELL:=/usr/bin/env bash # required because config files uses arrays
validate-hlsl-fxc: $(SNAPSHOTS_BASE_OUT)/hlsl/*.hlsl
@set -e && for file in $^ ; do \
FXC_PARAMS="-Zi -Od"; \
echo "Validating" $${file#"$(SNAPSHOTS_BASE_OUT)/"}; \
config="$$(dirname $${file})/$$(basename $${file}).config"; \
. $${config}; \
for (( i=0; i<$${#vertex[@]}; i++ )); do \
name=`echo $${vertex[i]} | cut -d \: -f 1`; \
profile=`echo $${vertex[i]} | cut -d \: -f 2`; \
sm=`echo $${profile} | cut -d \_ -f 2`; \
if (( sm < 6 )); then \
(set -x; fxc $${file} -T $${profile} -E $${name} $${FXC_PARAMS} > /dev/null); \
fi \
done; \
for (( i=0; i<$${#fragment[@]}; i++ )); do \
name=`echo $${fragment[i]} | cut -d \: -f 1`; \
profile=`echo $${fragment[i]} | cut -d \: -f 2`; \
sm=`echo $${profile} | cut -d \_ -f 2`; \
if (( sm < 6 )); then \
(set -x; fxc $${file} -T $${profile} -E $${name} $${FXC_PARAMS} > /dev/null); \
fi \
done; \
for (( i=0; i<$${#compute[@]}; i++ )); do \
name=`echo $${compute[i]} | cut -d \: -f 1`; \
profile=`echo $${compute[i]} | cut -d \: -f 2`; \
sm=`echo $${profile} | cut -d \_ -f 2`; \
if (( sm < 6 )); then \
(set -x; fxc $${file} -T $${profile} -E $${name} $${FXC_PARAMS} > /dev/null); \
fi \
done; \
echo "======================"; \
done

View File

@@ -1,3 +0,0 @@
vertex=(foo_vert:vs_5_1 )
fragment=(foo_frag:ps_5_1 )
compute=(assign_through_ptr:cs_5_1 )

20
tests/out/hlsl/access.ron Normal file
View File

@@ -0,0 +1,20 @@
(
vertex:[
(
entry_point:"foo_vert",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"foo_frag",
target_profile:"ps_5_1",
),
],
compute:[
(
entry_point:"assign_through_ptr",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(cs_main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"cs_main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(cs_main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"cs_main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

12
tests/out/hlsl/boids.ron Normal file
View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

12
tests/out/hlsl/empty.ron Normal file
View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main_vec4vec3_:ps_5_1 main_vec2scalar:ps_5_1 )
compute=()

View File

@@ -0,0 +1,16 @@
(
vertex:[
],
fragment:[
(
entry_point:"main_vec4vec3_",
target_profile:"ps_5_1",
),
(
entry_point:"main_vec2scalar",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(fs_main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"fs_main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(queries:vs_5_1 levels_queries:vs_5_1 )
fragment=(texture_sample:ps_5_1 texture_sample_comparison:ps_5_1 gather:ps_5_1 depth_no_comparison:ps_5_1 )
compute=(main:cs_5_1 depth_load:cs_5_1 )

40
tests/out/hlsl/image.ron Normal file
View File

@@ -0,0 +1,40 @@
(
vertex:[
(
entry_point:"queries",
target_profile:"vs_5_1",
),
(
entry_point:"levels_queries",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"texture_sample",
target_profile:"ps_5_1",
),
(
entry_point:"texture_sample_comparison",
target_profile:"ps_5_1",
),
(
entry_point:"gather",
target_profile:"ps_5_1",
),
(
entry_point:"depth_no_comparison",
target_profile:"ps_5_1",
),
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
(
entry_point:"depth_load",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vertex:vs_5_1 vertex_two_structs:vs_5_1 )
fragment=(fragment:ps_5_1 )
compute=(compute:cs_5_1 )

View File

@@ -0,0 +1,24 @@
(
vertex:[
(
entry_point:"vertex",
target_profile:"vs_5_1",
),
(
entry_point:"vertex_two_structs",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"fragment",
target_profile:"ps_5_1",
),
],
compute:[
(
entry_point:"compute",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vert_main:vs_5_1 )
fragment=(frag_main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,16 @@
(
vertex:[
(
entry_point:"vert_main",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"frag_main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vertex:vs_5_1 )
fragment=()
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
(
entry_point:"vertex",
target_profile:"vs_5_1",
),
],
fragment:[
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vert_main:vs_5_1 )
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,16 @@
(
vertex:[
(
entry_point:"vert_main",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(main:vs_5_1 )
fragment=()
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
(
entry_point:"main",
target_profile:"vs_5_1",
),
],
fragment:[
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vert_main:vs_5_1 )
fragment=(frag_main:ps_5_1 fs_extra:ps_5_1 )
compute=()

20
tests/out/hlsl/quad.ron Normal file
View File

@@ -0,0 +1,20 @@
(
vertex:[
(
entry_point:"vert_main",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"frag_main",
target_profile:"ps_5_1",
),
(
entry_point:"fs_extra",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vs_main:vs_5_1 )
fragment=(fs_main:ps_5_1 fs_main_without_storage:ps_5_1 )
compute=()

20
tests/out/hlsl/shadow.ron Normal file
View File

@@ -0,0 +1,20 @@
(
vertex:[
(
entry_point:"vs_main",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"fs_main",
target_profile:"ps_5_1",
),
(
entry_point:"fs_main_without_storage",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=(vs_main:vs_5_1 )
fragment=(fs_main:ps_5_1 )
compute=()

16
tests/out/hlsl/skybox.ron Normal file
View File

@@ -0,0 +1,16 @@
(
vertex:[
(
entry_point:"vs_main",
target_profile:"vs_5_1",
),
],
fragment:[
(
entry_point:"fs_main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(derivatives:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"derivatives",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=(main:ps_5_1 )
compute=()

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
(
entry_point:"main",
target_profile:"ps_5_1",
),
],
compute:[
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(test_workgroupUniformLoad:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"test_workgroupUniformLoad",
target_profile:"cs_5_1",
),
],
)

View File

@@ -1,3 +0,0 @@
vertex=()
fragment=()
compute=(main:cs_5_1 )

View File

@@ -0,0 +1,12 @@
(
vertex:[
],
fragment:[
],
compute:[
(
entry_point:"main",
target_profile:"cs_5_1",
),
],
)

View File

@@ -348,56 +348,30 @@ fn write_output_hlsl(
// We need a config file for validation script
// This file contains an info about profiles (shader stages) contains inside generated shader
// This info will be passed to dxc
let mut config_str = String::new();
let mut vertex_str = String::from("vertex=(");
let mut fragment_str = String::from("fragment=(");
let mut compute_str = String::from("compute=(");
let mut config = hlsl_snapshots::Config::empty();
for (index, ep) in module.entry_points.iter().enumerate() {
let name = match reflection_info.entry_point_names[index] {
Ok(ref name) => name,
Err(_) => continue,
};
match ep.stage {
naga::ShaderStage::Vertex => {
write!(
vertex_str,
"{}:{}_{} ",
name,
ep.stage.to_hlsl_str(),
options.shader_model.to_str(),
)
.unwrap();
}
naga::ShaderStage::Fragment => {
write!(
fragment_str,
"{}:{}_{} ",
name,
ep.stage.to_hlsl_str(),
options.shader_model.to_str(),
)
.unwrap();
}
naga::ShaderStage::Compute => {
write!(
compute_str,
"{}:{}_{} ",
name,
ep.stage.to_hlsl_str(),
options.shader_model.to_str(),
)
.unwrap();
}
naga::ShaderStage::Vertex => &mut config.vertex,
naga::ShaderStage::Fragment => &mut config.fragment,
naga::ShaderStage::Compute => &mut config.compute,
}
.push(hlsl_snapshots::ConfigItem {
entry_point: name.clone(),
target_profile: format!(
"{}_{}",
ep.stage.to_hlsl_str(),
options.shader_model.to_str()
),
});
}
writeln!(config_str, "{vertex_str})\n{fragment_str})\n{compute_str})").unwrap();
fs::write(
destination.join(format!("hlsl/{file_name}.hlsl.config")),
config_str,
)
.unwrap();
config
.to_file(destination.join(format!("hlsl/{file_name}.ron")))
.unwrap();
}
#[cfg(feature = "wgsl-out")]

2
xtask/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
!Cargo.lock
target/

117
xtask/Cargo.lock generated Normal file
View File

@@ -0,0 +1,117 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.70"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "either"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "env_logger"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
dependencies = [
"log",
]
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "hlsl-snapshots"
version = "0.1.0"
dependencies = [
"anyhow",
"nanoserde",
]
[[package]]
name = "libc"
version = "0.2.140"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c"
[[package]]
name = "log"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
dependencies = [
"cfg-if",
]
[[package]]
name = "nanoserde"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "755e7965536bc54d7c9fba2df5ada5bf835b0443fd613f0a53fa199a301839d3"
dependencies = [
"nanoserde-derive",
]
[[package]]
name = "nanoserde-derive"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed7a94da6c6181c35d043fc61c43ac96d3a5d739e7b8027f77650ba41504d6ab"
[[package]]
name = "once_cell"
version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
[[package]]
name = "pico-args"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315"
[[package]]
name = "shell-words"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
[[package]]
name = "which"
version = "4.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269"
dependencies = [
"either",
"libc",
"once_cell",
]
[[package]]
name = "xtask"
version = "0.1.0"
dependencies = [
"anyhow",
"env_logger",
"glob",
"hlsl-snapshots",
"log",
"pico-args",
"shell-words",
"which",
]

19
xtask/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "xtask"
version = "0.1.0"
edition = "2021"
publish = false
[dependencies]
anyhow = "1"
env_logger = { version = "0.10.0", default-features = false }
glob = "0.3.1"
hlsl-snapshots = { path = "./hlsl-snapshots"}
log = "0.4.17"
pico-args = "0.5.0"
shell-words = "1.1.0"
which = "4.4.0"
[workspace]
members = [".", "./hlsl-snapshots"]

View File

@@ -0,0 +1,8 @@
[package]
name = "hlsl-snapshots"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1"
nanoserde = "0.1.32"

View File

@@ -0,0 +1,97 @@
use std::{error::Error, fmt::Display, fs, io, path::Path};
use anyhow::{anyhow, ensure};
use nanoserde::{self, DeRon, DeRonErr, SerRon};
/// Error raised when an HLSL snapshot's RON configuration cannot be used.
///
/// Wraps a [`BadRonParseKind`] describing the specific failure.
#[derive(Debug)]
struct BadRonParse(BadRonParseKind);

impl Display for BadRonParse {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "failed to read RON configuration of HLSL snapshot test")
    }
}

impl Error for BadRonParse {
    // Expose the failure kind as this error's source so reporters (e.g.
    // `anyhow`'s `{:?}`) can print the full cause chain.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        Some(&self.0)
    }
}
/// The specific way reading a snapshot configuration failed.
#[derive(Debug)]
enum BadRonParseKind {
    /// The configuration file could not be read from disk.
    Read { source: io::Error },
    /// The file's contents were not valid RON for [`Config`].
    Parse { source: DeRonErr },
    /// The file parsed, but listed no entry points for any stage.
    Empty,
}

impl Display for BadRonParseKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            // Forward directly to the underlying error's message.
            BadRonParseKind::Read { source } => Display::fmt(source, f),
            BadRonParseKind::Parse { source } => Display::fmt(source, f),
            BadRonParseKind::Empty => write!(f, "no configuration was specified"),
        }
    }
}

impl Error for BadRonParseKind {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        // NOTE(review): `Display` above already forwards to the inner error's
        // message, so returning `source.source()` (instead of the source
        // itself) appears intended to avoid printing the same message twice
        // in an error chain — confirm before "fixing".
        match self {
            BadRonParseKind::Read { source } => source.source(),
            BadRonParseKind::Parse { source } => source.source(),
            BadRonParseKind::Empty => None,
        }
    }
}
/// Per-stage entry-point configuration for validating one HLSL snapshot.
///
/// Serialized as RON alongside the generated `.hlsl` file (see
/// `tests/out/hlsl/*.ron`).
#[derive(Debug, DeRon, SerRon)]
pub struct Config {
    pub vertex: Vec<ConfigItem>,
    pub fragment: Vec<ConfigItem>,
    pub compute: Vec<ConfigItem>,
}
impl Config {
    /// Returns a configuration with no entry points for any shader stage.
    pub fn empty() -> Self {
        Self {
            vertex: Vec::new(),
            fragment: Vec::new(),
            compute: Vec::new(),
        }
    }

    /// Reads and parses a RON snapshot configuration from `path`.
    ///
    /// Fails (wrapped in [`BadRonParse`]) if the file cannot be read, is not
    /// valid RON, or specifies no entry points at all.
    pub fn from_path(path: impl AsRef<Path>) -> anyhow::Result<Config> {
        let path = path.as_ref();
        let raw_config = match fs::read_to_string(path) {
            Ok(contents) => contents,
            Err(source) => return Err(BadRonParse(BadRonParseKind::Read { source }).into()),
        };
        let config = match Config::deserialize_ron(&raw_config) {
            Ok(parsed) => parsed,
            Err(source) => return Err(BadRonParse(BadRonParseKind::Parse { source }).into()),
        };
        ensure!(!config.is_empty(), BadRonParse(BadRonParseKind::Empty));
        Ok(config)
    }

    /// Serializes this configuration as RON (with a trailing newline) and
    /// writes it to `path`.
    pub fn to_file(&self, path: impl AsRef<Path>) -> anyhow::Result<()> {
        let path = path.as_ref();
        let mut serialized = self.serialize_ron();
        serialized.push('\n');
        fs::write(path, &serialized)
            .map_err(|e| anyhow!("failed to write to {}: {e}", path.display()))
    }

    /// True when no entry points are configured for any stage.
    pub fn is_empty(&self) -> bool {
        // Destructure so adding a field forces this method to be updated.
        let Self {
            vertex,
            fragment,
            compute,
        } = self;
        [vertex, fragment, compute].iter().all(|list| list.is_empty())
    }
}
/// A single shader entry point to compile during HLSL validation.
#[derive(Debug, DeRon, SerRon)]
pub struct ConfigItem {
    /// Name of the entry-point function in the generated HLSL.
    pub entry_point: String,
    /// Target profile passed to the compiler (e.g. `vs_5_1`).
    ///
    /// See also
    /// <https://learn.microsoft.com/en-us/windows/win32/direct3dtools/dx-graphics-tools-fxc-using>.
    pub target_profile: String,
}

160
xtask/src/cli.rs Normal file
View File

@@ -0,0 +1,160 @@
use std::process::exit;
use anyhow::{anyhow, bail, ensure, Context};
use pico_args::Arguments;
/// Usage text printed for `--help`.
///
/// NOTE(review): kept in sync with the subcommand parsers below by hand;
/// update both together.
const HELP: &str = "\
Usage: xtask <COMMAND>
Commands:
all
bench [--clean]
validate
dot
glsl
hlsl
dxc
fxc
msl
spv
Options:
-h, --help Print help
";

/// Parsed command-line arguments for the `xtask` binary.
#[derive(Debug)]
pub(crate) struct Args {
    /// The workflow to run.
    pub subcommand: Subcommand,
}
impl Args {
    /// Parses CLI arguments from the environment.
    ///
    /// Prints [`HELP`] and exits (code 101) on `-h`/`--help`; prints the
    /// parse error and exits (code 1) on invalid input.
    pub fn parse() -> Self {
        let mut args = Arguments::from_env();
        log::debug!("parsing args: {args:?}");
        // HELP advertises both `-h` and `--help`, but the original check only
        // recognized `--help`, so `-h` fell through to subcommand parsing and
        // produced an "unrecognized subcommand" error. Accept both.
        if args.contains(["-h", "--help"]) {
            eprint!("{HELP}");
            exit(101);
        }
        match (|| -> anyhow::Result<_> {
            let subcommand = Subcommand::parse(args)?;
            Ok(Self { subcommand })
        })() {
            Ok(this) => this,
            Err(e) => {
                eprintln!("{:?}", anyhow!(e));
                exit(1)
            }
        }
    }
}
/// Top-level `xtask` subcommands.
#[derive(Debug)]
pub(crate) enum Subcommand {
    /// Format, test, and lint the whole workspace (fmt + test + clippy).
    All,
    /// Run benchmarks; `clean` removes Criterion artifacts first.
    Bench { clean: bool },
    /// Validate generated shader snapshot outputs for one backend.
    Validate(ValidateSubcommand),
}
impl Subcommand {
    /// Parses the top-level subcommand, consuming any flags it accepts and
    /// rejecting leftover arguments.
    fn parse(mut args: Arguments) -> anyhow::Result<Subcommand> {
        let parsed = args.subcommand().context("failed to parse subcommand")?;
        match parsed.as_deref() {
            None => bail!("no subcommand specified; see `--help` for more details"),
            Some("all") => {
                ensure_remaining_args_empty(args)?;
                Ok(Self::All)
            }
            Some("bench") => {
                // `--clean` must be consumed before the leftover-args check.
                let clean = args.contains("--clean");
                ensure_remaining_args_empty(args)?;
                Ok(Self::Bench { clean })
            }
            Some("validate") => Ok(Self::Validate(ValidateSubcommand::parse(args)?)),
            Some(other) => {
                bail!("unrecognized subcommand {other:?}; see `--help` for more details")
            }
        }
    }
}
/// Which backend's snapshot outputs `xtask validate` should check.
#[derive(Debug)]
pub(crate) enum ValidateSubcommand {
    /// SPIR-V assembly under `tests/out/spv/`.
    Spirv,
    /// Metal Shading Language under `tests/out/msl/`.
    Metal,
    /// Per-stage GLSL files under `tests/out/glsl/`.
    Glsl,
    /// GraphViz `dot` files under `tests/out/dot/`.
    Dot,
    /// WGSL files under `tests/out/wgsl/`.
    Wgsl,
    /// HLSL under `tests/out/hlsl/`, via the chosen compiler.
    Hlsl(ValidateHlslCommand),
}
impl ValidateSubcommand {
    /// Parses the backend argument of `xtask validate`.
    fn parse(mut args: Arguments) -> Result<Self, anyhow::Error> {
        let parsed = args
            .subcommand()
            .context("failed to parse `validate` subcommand")?;
        let backend = match parsed.as_deref() {
            None => bail!("no `validate` subcommand specified; see `--help` for more details"),
            Some("spv") => Self::Spirv,
            Some("msl") => Self::Metal,
            Some("glsl") => Self::Glsl,
            Some("dot") => Self::Dot,
            Some("wgsl") => Self::Wgsl,
            // `hlsl` owns the remaining args; delegate and return early.
            Some("hlsl") => return Ok(Self::Hlsl(ValidateHlslCommand::parse(args)?)),
            Some(other) => bail!(
                "unrecognized `validate` subcommand {other:?}; see `--help` for more details"
            ),
        };
        ensure_remaining_args_empty(args)?;
        Ok(backend)
    }
}
/// Which HLSL compiler to validate generated HLSL with.
#[derive(Debug)]
pub(crate) enum ValidateHlslCommand {
    /// The DirectX Shader Compiler (`dxc`).
    Dxc,
    /// The legacy `fxc` compiler.
    Fxc,
}
impl ValidateHlslCommand {
    /// Parses which HLSL compiler (`dxc` or `fxc`) to validate with.
    fn parse(mut args: Arguments) -> anyhow::Result<Self> {
        let parsed = args
            .subcommand()
            .context("failed to parse `hlsl` subcommand")?;
        let compiler = match parsed.as_deref() {
            None => bail!("no `hlsl` subcommand specified; see `--help` for more details"),
            Some("dxc") => Self::Dxc,
            Some("fxc") => Self::Fxc,
            Some(other) => {
                bail!("unrecognized `hlsl` subcommand {other:?}; see `--help` for more details")
            }
        };
        ensure_remaining_args_empty(args)?;
        Ok(compiler)
    }
}
fn ensure_remaining_args_empty(args: Arguments) -> anyhow::Result<()> {
let remaining_args = args.finish();
ensure!(
remaining_args.is_empty(),
"not all arguments were parsed (remaining: {remaining_args:?}); fix your invocation, \
please!"
);
Ok(())
}

10
xtask/src/fs.rs Normal file
View File

@@ -0,0 +1,10 @@
use std::{fs::File, path::Path};
use anyhow::Context;
pub(crate) use std::fs::*;
/// Opens `path` for reading, attaching the path to any failure for context.
pub(crate) fn open_file(path: impl AsRef<Path>) -> anyhow::Result<File> {
    let path = path.as_ref();
    let opened = File::open(path);
    opened.with_context(|| format!("failed to open {path:?}"))
}

37
xtask/src/glob.rs Normal file
View File

@@ -0,0 +1,37 @@
use std::path::Path;
use anyhow::Context;
use glob::glob;
use crate::result::{ErrorStatus, LogIfError};
/// Visits every *file* matching `glob_expr` relative to `path`, invoking `f`
/// on each one.
///
/// Errors from glob iteration, metadata lookup, or `f` itself are logged and
/// folded into the returned [`ErrorStatus`] rather than aborting the walk.
pub(crate) fn visit_files(
    path: impl AsRef<Path>,
    glob_expr: &str,
    mut f: impl FnMut(&Path) -> anyhow::Result<()>,
) -> ErrorStatus {
    let path = path.as_ref();
    let glob_expr = path.join(glob_expr);
    let glob_expr = glob_expr.to_str().unwrap();
    let mut status = ErrorStatus::NoFailuresFound;
    glob(glob_expr)
        // BUG FIX: this was `.context("glob pattern {path:?} is invalid")`,
        // which does not interpolate — `Context::context` takes its message
        // verbatim, so the panic would print the literal `{path:?}`.
        .with_context(|| format!("glob pattern {glob_expr:?} is invalid"))
        .unwrap()
        .for_each(|path_res| {
            if let Some(path) = path_res
                .with_context(|| format!("error while iterating over glob {path:?}"))
                .log_if_err_found(&mut status)
            {
                // Globs can also match directories; only hand real files to `f`.
                if path
                    .metadata()
                    .with_context(|| format!("failed to fetch metadata for {path:?}"))
                    .log_if_err_found(&mut status)
                    .map_or(false, |m| m.is_file())
                {
                    f(&path).log_if_err_found(&mut status);
                }
            }
        });
    status
}

305
xtask/src/main.rs Normal file
View File

@@ -0,0 +1,305 @@
use std::{
io::{BufRead, BufReader},
path::Path,
process::{ExitCode, Stdio},
};
use anyhow::{bail, Context};
use cli::Args;
use crate::{
cli::{Subcommand, ValidateHlslCommand, ValidateSubcommand},
fs::{open_file, remove_dir_all},
glob::visit_files,
path::join_path,
process::{which, EasyCommand},
result::{ErrorStatus, LogIfError},
};
mod cli;
mod fs;
mod glob;
mod path;
mod process;
mod result;
/// Entry point: initializes logging, then dispatches to [`run`], translating
/// any error into a failing exit code.
fn main() -> ExitCode {
    env_logger::builder()
        .filter_level(log::LevelFilter::Info)
        .parse_default_env()
        .format_indent(Some(0))
        .init();
    if let Err(e) = run(Args::parse()) {
        log::error!("{e:?}");
        return ExitCode::FAILURE;
    }
    ExitCode::SUCCESS
}
fn run(args: Args) -> anyhow::Result<()> {
let snapshots_base_out = join_path(["tests", "out"]);
let Args { subcommand } = args;
assert!(which("cargo").is_ok());
match subcommand {
Subcommand::All => {
EasyCommand::simple("cargo", ["fmt"]).success()?;
EasyCommand::simple("cargo", ["test", "--all-features", "--workspace"]).success()?;
EasyCommand::simple(
"cargo",
[
"clippy",
"--all-features",
"--workspace",
"--",
"-D",
"warnings",
],
)
.success()?;
Ok(())
}
Subcommand::Bench { clean } => {
if clean {
let criterion_artifact_dir = join_path(["target", "criterion"]);
log::info!("removing {}", criterion_artifact_dir.display());
remove_dir_all(&criterion_artifact_dir)
.with_context(|| format!("failed to remove {criterion_artifact_dir:?}"))?;
}
EasyCommand::simple("cargo", ["bench"]).success()
}
Subcommand::Validate(cmd) => {
let ack_visiting = |path: &Path| log::info!("Validating {}", path.display());
let err_status = match cmd {
ValidateSubcommand::Spirv => {
let spirv_as = "spirv-as";
which(spirv_as)?;
let spirv_val = "spirv-val";
which(spirv_val)?;
visit_files(snapshots_base_out, "spv/*.spvasm", |path| {
ack_visiting(path);
let second_line = {
let mut file = BufReader::new(open_file(path)?);
let mut buf = String::new();
file.read_line(&mut buf).with_context(|| {
format!("failed to read first line from {path:?}")
})?;
buf.clear();
file.read_line(&mut buf).with_context(|| {
format!("failed to read second line from {path:?}")
})?;
buf
};
let expected_header_prefix = "; Version: ";
let Some(version) =
second_line.strip_prefix(expected_header_prefix) else {
bail!(
"no {expected_header_prefix:?} header found in {path:?}"
);
};
let file = open_file(path)?;
let mut spirv_as_cmd = EasyCommand::new(spirv_as, |cmd| {
cmd.stdin(Stdio::from(file))
.stdout(Stdio::piped())
.arg("--target-env")
.arg(format!("spv{version}"))
.args(["-", "-o", "-"])
});
let child = spirv_as_cmd
.spawn()
.with_context(|| format!("failed to spawn {cmd:?}"))?;
EasyCommand::new(spirv_val, |cmd| cmd.stdin(child.stdout.unwrap()))
.success()
})
}
ValidateSubcommand::Metal => {
let xcrun = "xcrun";
which(xcrun)?;
visit_files(snapshots_base_out, "msl/*.msl", |path| {
ack_visiting(path);
let first_line = {
let mut file = BufReader::new(open_file(path)?);
let mut buf = String::new();
file.read_line(&mut buf)
.with_context(|| format!("failed to read header from {path:?}"))?;
buf
};
let expected_header_prefix = "// language: ";
let Some(language) =
first_line.strip_prefix(expected_header_prefix) else {
bail!(
"no {expected_header_prefix:?} header found in {path:?}"
);
};
let language = language.strip_suffix('\n').unwrap_or(language);
let file = open_file(path)?;
EasyCommand::new(xcrun, |cmd| {
cmd.stdin(Stdio::from(file))
.args(["-sdk", "macosx", "metal", "-mmacosx-version-min=10.11"])
.arg(format!("-std=macos-{language}"))
.args(["-x", "metal", "-", "-o", "/dev/null"])
})
.success()
})
}
ValidateSubcommand::Glsl => {
let glslang_validator = "glslangValidator";
which(glslang_validator)?;
let mut err_status = ErrorStatus::NoFailuresFound;
for (glob, type_arg) in [
("glsl/*.Vertex.glsl", "vert"),
("glsl/*.Fragment.glsl", "frag"),
("glsl/*.Compute.glsl", "comp"),
] {
let type_err_status = visit_files(&snapshots_base_out, glob, |path| {
ack_visiting(path);
let file = open_file(path)?;
EasyCommand::new(glslang_validator, |cmd| {
cmd.stdin(Stdio::from(file))
.args(["--stdin", "-S"])
.arg(type_arg)
})
.success()
});
err_status = err_status.merge(type_err_status);
}
err_status
}
ValidateSubcommand::Dot => {
let dot = "dot";
which(dot)?;
visit_files(snapshots_base_out, "dot/*.dot", |path| {
ack_visiting(path);
let file = open_file(path)?;
EasyCommand::new(dot, |cmd| {
cmd.stdin(Stdio::from(file)).stdout(Stdio::null())
})
.success()
})
}
ValidateSubcommand::Wgsl => {
visit_files(snapshots_base_out, "wgsl/*.wgsl", |path| {
ack_visiting(path);
EasyCommand::new("cargo", |cmd| cmd.args(["run", "--"]).arg(path)).success()
})
}
ValidateSubcommand::Hlsl(cmd) => {
let visit_hlsl = |consume_config_item: &mut dyn FnMut(
&Path,
hlsl_snapshots::ConfigItem,
)
-> anyhow::Result<()>| {
visit_files(snapshots_base_out, "hlsl/*.hlsl", |path| {
ack_visiting(path);
let hlsl_snapshots::Config {
vertex,
fragment,
compute,
} = hlsl_snapshots::Config::from_path(path.with_extension("ron"))?;
let mut status = ErrorStatus::NoFailuresFound;
[vertex, fragment, compute]
.into_iter()
.flatten()
.for_each(|shader| {
consume_config_item(path, shader).log_if_err_found(&mut status);
});
match status {
ErrorStatus::NoFailuresFound => Ok(()),
ErrorStatus::OneOrMoreFailuresFound => bail!(
"one or more shader HLSL shader tests failed for {}",
path.display()
),
}
})
};
let validate = |bin, file: &_, config_item, params: &[_]| {
let hlsl_snapshots::ConfigItem {
entry_point,
target_profile,
} = config_item;
EasyCommand::new(bin, |cmd| {
cmd.arg(file)
.arg("-T")
.arg(&target_profile)
.arg("-E")
.arg(&entry_point)
.args(params)
.stdout(Stdio::null())
})
.success()
.with_context(|| {
format!(
"failed to validate entry point {entry_point:?} with profile \
{target_profile:?}"
)
})
};
match cmd {
ValidateHlslCommand::Dxc => {
let bin = "dxc";
which(bin)?;
visit_hlsl(&mut |file, config_item| {
// Reference:
// <https://github.com/microsoft/DirectXShaderCompiler/blob/6ee4074a4b43fa23bf5ad27e4f6cafc6b835e437/tools/clang/docs/UsingDxc.rst>.
validate(
bin,
file,
config_item,
&["-Wno-parentheses-equality", "-Zi", "-Qembed_debug", "-Od"],
)
})
}
ValidateHlslCommand::Fxc => {
let bin = "fxc";
which(bin)?;
visit_hlsl(&mut |file, config_item| {
let Some(Ok(shader_model_major_version)) = config_item
.target_profile
.split('_')
.nth(1)
.map(|segment| segment.parse::<u8>()) else {
bail!(
"expected target profile of the form \
`{{model}}_{{major}}_{{minor}}`, found invalid target \
profile {:?} in file {}",
config_item.target_profile,
file.display()
)
};
// NOTE: This isn't implemented by `fxc.exe`; see
// <https://learn.microsoft.com/en-us/windows/win32/direct3dtools/dx-graphics-tools-fxc-syntax#profiles>.
if shader_model_major_version < 6 {
// Reference:
// <https://learn.microsoft.com/en-us/windows/win32/direct3dtools/dx-graphics-tools-fxc-syntax>.
validate(bin, file, config_item, &["-Zi", "-Od"])
} else {
log::debug!(
"skipping config. item {config_item:?} because the \
shader model major version is > 6"
);
Ok(())
}
})
}
}
}
};
match err_status {
ErrorStatus::NoFailuresFound => Ok(()),
ErrorStatus::OneOrMoreFailuresFound => {
bail!("failed to validate one or more files, see above output for more details")
}
}
}
}
}

11
xtask/src/path.rs Normal file
View File

@@ -0,0 +1,11 @@
use std::path::{Path, PathBuf};
/// Join an ordered sequence of path segments into a single [`PathBuf`].
///
/// Equivalent to `push`ing each segment onto an initially empty path.
pub(crate) fn join_path<P, I>(iter: I) -> PathBuf
where
    P: AsRef<Path>,
    I: IntoIterator<Item = P>,
{
    // `PathBuf: FromIterator<P>` pushes each item in turn, exactly like
    // extending an empty `PathBuf` would.
    iter.into_iter().collect()
}

78
xtask/src/process.rs Normal file
View File

@@ -0,0 +1,78 @@
use std::{
ffi::{OsStr, OsString},
fmt::{self, Display},
iter::once,
ops::{Deref, DerefMut},
process::Command,
};
use anyhow::{ensure, Context};
/// A thin wrapper around [`std::process::Command`] that adds ergonomic
/// construction (`new`, `simple`) and error-reporting execution (`success`).
#[derive(Debug)]
pub(crate) struct EasyCommand {
    // The underlying command being configured and executed.
    inner: Command,
}
impl EasyCommand {
    /// Create a command for program `cmd`, configured by the `config` closure.
    pub fn new<C>(cmd: C, config: impl FnOnce(&mut Command) -> &mut Command) -> Self
    where
        C: AsRef<OsStr>,
    {
        let mut command = Command::new(cmd);
        config(&mut command);
        Self { inner: command }
    }

    /// Create a command from a program name and a plain argument list.
    pub fn simple<C, A, I>(cmd: C, args: I) -> Self
    where
        C: AsRef<OsStr>,
        A: AsRef<OsStr>,
        I: IntoIterator<Item = A>,
    {
        Self::new(cmd, |command| command.args(args))
    }

    /// Run the command to completion, returning `Ok(())` only when it exits
    /// successfully.
    ///
    /// Failure to spawn and a non-zero exit status both surface as errors
    /// carrying the shell-quoted command line (via `Display`).
    pub fn success(&mut self) -> anyhow::Result<()> {
        log::debug!("running {:?}", self.inner);
        let status = self
            .inner
            .status()
            .with_context(|| format!("failed to run {self}"))?;
        ensure!(
            status.success(),
            "{self} failed to run; exit code: {:?}",
            status.code()
        );
        Ok(())
    }
}
impl Deref for EasyCommand {
    type Target = Command;

    /// Borrow the wrapped [`Command`] for read access.
    fn deref(&self) -> &Self::Target {
        let Self { inner } = self;
        inner
    }
}
impl DerefMut for EasyCommand {
    /// Borrow the wrapped [`Command`] for mutation.
    fn deref_mut(&mut self) -> &mut Self::Target {
        let Self { inner } = self;
        inner
    }
}
/// Check that `binary_name` is resolvable on `PATH`, returning just its file
/// name on success.
///
/// Callers mostly use this as a presence check before invoking the tool.
pub(crate) fn which(binary_name: &str) -> anyhow::Result<OsString> {
    let resolved = ::which::which(binary_name)
        .with_context(|| format!("unable to find `{binary_name}` binary"))?;
    // A successfully resolved binary path always has a final component.
    Ok(resolved.file_name().unwrap().to_owned())
}
impl Display for EasyCommand {
    /// Render the command as a backtick-quoted, shell-escaped command line,
    /// e.g. `` `spirv-as --target-env spv1.0 - -o -` ``.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let program = self.inner.get_program().to_string_lossy();
        let arguments = self.inner.get_args().map(|arg| arg.to_string_lossy());
        write!(f, "`{}`", shell_words::join(once(program).chain(arguments)))
    }
}

33
xtask/src/result.rs Normal file
View File

@@ -0,0 +1,33 @@
/// Aggregate outcome of validating one or more files.
///
/// `PartialEq`/`Eq` are derived (backward-compatible addition) so callers can
/// compare statuses directly as well as `match` on them.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum ErrorStatus {
    /// No validation failures have been observed so far.
    NoFailuresFound,
    /// At least one validation failure has been observed.
    OneOrMoreFailuresFound,
}

impl ErrorStatus {
    /// Combine two statuses; a failure in either operand is "sticky" and
    /// dominates the result.
    pub(crate) fn merge(self, other: Self) -> Self {
        match (self, other) {
            (Self::OneOrMoreFailuresFound, _) | (_, Self::OneOrMoreFailuresFound) => {
                Self::OneOrMoreFailuresFound
            }
            (Self::NoFailuresFound, Self::NoFailuresFound) => Self::NoFailuresFound,
        }
    }
}
/// Extension trait for logging an `Err` and recording it in an `ErrorStatus`
/// instead of propagating it.
pub(crate) trait LogIfError<T> {
    /// If `self` is an error, log it and mark `status` as failed; return the
    /// success value (if any) as an `Option`.
    fn log_if_err_found(self, status: &mut ErrorStatus) -> Option<T>;
}
impl<T> LogIfError<T> for anyhow::Result<T> {
    /// Convert the result to an `Option`, logging any error and folding a
    /// failure marker into `status` instead of returning the error.
    fn log_if_err_found(self, status: &mut ErrorStatus) -> Option<T> {
        self.map_err(|e| {
            log::error!("{e:?}");
            *status = status.merge(ErrorStatus::OneOrMoreFailuresFound);
        })
        .ok()
    }
}