diff --git a/.github/workflows/build-cache.yml b/.github/workflows/build-cache.yml index 4b3de2fbb..48307b9a7 100644 --- a/.github/workflows/build-cache.yml +++ b/.github/workflows/build-cache.yml @@ -24,16 +24,16 @@ jobs: ##### The block below is shared between cache build and PR build workflows ##### - name: Install EStarkPolygon prover dependencies run: sudo apt-get update && sudo apt-get install -y nlohmann-json3-dev libpqxx-dev nasm libgrpc++-dev uuid-dev - - name: Install Rust toolchain nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install Rust toolchain 1.81 (stable) - run: rustup toolchain install 1.81-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu + - name: Install Rust toolchain 1.85 (stable) + run: rustup toolchain install 1.85-x86_64-unknown-linux-gnu - name: Set cargo to perform shallow clones run: echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" >> $GITHUB_ENV - name: Format run: cargo fmt --all --check --verbose - - name: Cargo check with Rust 1.81 (default features) - run: cargo +1.81-x86_64-unknown-linux-gnu check --all-targets + - name: Cargo check with Rust 1.85 (default features) + run: cargo +1.85-x86_64-unknown-linux-gnu check --all-targets - name: Lint no default features run: cargo clippy --all --all-targets --no-default-features --profile pr-tests --verbose -- -D warnings - name: Lint all features diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml index bf8459c13..4e9e06aea 100644 --- a/.github/workflows/nightly-tests.yml +++ b/.github/workflows/nightly-tests.yml @@ -59,14 +59,14 @@ jobs: ~/.cargo/git target key: ${{ runner.os }}-cargo-release-${{ hashFiles('**/Cargo.toml') }} - - name: Install Rust toolchain 1.81 - run: rustup toolchain install 1.81-x86_64-unknown-linux-gnu + - name: Install Rust toolchain 1.85 + run: rustup toolchain install 1.85-x86_64-unknown-linux-gnu - name: Install nightly - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install test dependencies run: sudo apt-get update && sudo apt-get install -y binutils-riscv64-unknown-elf lld - name: Install EStarkPolygon prover dependencies diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml index 345777be4..68ab80744 100644 --- a/.github/workflows/pr-tests.yml +++ b/.github/workflows/pr-tests.yml @@ -47,16 +47,16 @@ jobs: ##### The block below is shared between cache build and PR build workflows ##### - name: Install EStarkPolygon 
prover dependencies run: sudo apt-get update && sudo apt-get install -y nlohmann-json3-dev libpqxx-dev nasm libgrpc++-dev uuid-dev - - name: Install Rust toolchain nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install Rust toolchain 1.81 (stable) - run: rustup toolchain install 1.81-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu + - name: Install Rust toolchain 1.85 (stable) + run: rustup toolchain install 1.85-x86_64-unknown-linux-gnu - name: Set cargo to perform shallow clones run: echo "CARGO_NET_GIT_FETCH_WITH_CLI=true" >> $GITHUB_ENV - name: Format run: cargo fmt --all --check --verbose - - name: Cargo check with Rust 1.81 (default features) - run: cargo +1.81-x86_64-unknown-linux-gnu check --all-targets + - name: Cargo check with Rust 1.85 (default features) + run: cargo +1.85-x86_64-unknown-linux-gnu check --all-targets - name: Lint no default features run: cargo clippy --all --all-targets --no-default-features --profile pr-tests --verbose -- -D warnings - name: Lint all features @@ -95,16 +95,18 @@ jobs: uses: actions/download-artifact@v4 with: name: tests_archive - - name: Install Rust toolchain nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install nightly-2024-08-01 - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install test dependencies run: sudo apt-get update && sudo apt-get install -y binutils-riscv64-unknown-elf lld + - name: Install Rust deps + run: rustup install nightly-2025-05-14 && rustup component add rust-src --toolchain nightly-2025-05-14 + - name: Install Rust deps + run: rustup install nightly-2025-02-14 && rustup component add rust-src --toolchain nightly-2025-02-14 - uses: taiki-e/install-action@nextest - name: Run default tests run: cargo nextest run --archive-file tests.tar.zst --workspace-remap . 
--verbose --partition count:"${{ matrix.test }}"/2 --no-tests=warn @@ -119,14 +121,12 @@ jobs: with: submodules: recursive # Do not use the cache because we are compiling a different version of powdr. - - name: Install Rust toolchain nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install nightly - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install test dependencies run: sudo apt-get update && sudo apt-get install -y binutils-riscv64-unknown-elf lld - name: Run examples that cargo accepts as examples @@ -135,8 +135,6 @@ jobs: run: cd powdr-test/examples/serialized-inputs && cargo run -r - name: Run crate example fibonacci with the given branch run: cd powdr-test/examples/fibonacci && cargo run -r - - name: Run crate example fibonacci with the latest powdr release - run: cd examples/fibonacci && cargo run -r test_estark_polygon: needs: build @@ -149,14 +147,12 @@ jobs: uses: actions/download-artifact@v4 with: name: tests_archive - - name: Install Rust toolchain nightly-2024-12-17(with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install nightly-2024-08-01 - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14(with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install EStarkPolygon prover system dependency run: sudo apt-get update && sudo apt-get install -y nlohmann-json3-dev - uses: taiki-e/install-action@nextest @@ -199,16 +195,18 @@ jobs: uses: actions/download-artifact@v4 with: name: tests_archive - - name: Install Rust toolchain 
nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install test dependencies run: sudo apt-get update && sudo apt-get install -y binutils-riscv64-unknown-elf lld - - name: Install nightly-2024-08-01 - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + - name: Install Rust deps + run: rustup install nightly-2025-05-14 && rustup component add rust-src --toolchain nightly-2025-05-14 + - name: Install Rust deps + run: rustup install nightly-2025-02-14 && rustup component add rust-src --toolchain nightly-2025-02-14 - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - uses: taiki-e/install-action@nextest - name: Run slow tests run: | @@ -240,16 +238,14 @@ jobs: target/ Cargo.lock key: ${{ runner.os }}-cargo-pr-tests - - name: Install Rust toolchain nightly-2024-12-17 (with clippy and rustfmt) - run: rustup toolchain install nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2024-12-17-x86_64-unknown-linux-gnu - - name: Install Rust toolchain 1.81 - run: rustup toolchain install 1.81-x86_64-unknown-linux-gnu - - name: Install nightly - run: rustup toolchain install nightly-2024-08-01-x86_64-unknown-linux-gnu + - name: Install Rust toolchain nightly-2025-05-14 (with clippy and rustfmt) + run: rustup toolchain install nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add clippy --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu && rustup component add rustfmt --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu + - name: Install Rust toolchain 1.85 + run: rustup toolchain install 1.85-x86_64-unknown-linux-gnu - name: Install std source - run: rustup component add rust-src --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup component add rust-src --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install riscv target - run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2024-08-01-x86_64-unknown-linux-gnu + run: rustup target add riscv32imac-unknown-none-elf --toolchain nightly-2025-05-14-x86_64-unknown-linux-gnu - name: Install test dependencies run: sudo apt-get update && sudo apt-get install -y binutils-riscv64-unknown-elf lld - name: Install EStarkPolygon prover dependencies diff --git a/Cargo.toml b/Cargo.toml index 58c9cf616..59f370c43 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,6 +33,8 @@ members = [ "backend-utils", "executor-utils", "autoprecompiles", + "openvm", + "cli-openvm", ] 
exclude = ["riscv-runtime"] @@ -72,6 +74,8 @@ powdr-riscv = { path = "./riscv", version = "0.1.4" } powdr-riscv-executor = { path = "./riscv-executor", version = "0.1.4" } powdr-syscalls = { path = "./syscalls", version = "0.1.4" } powdr-schemas = { path = "./schemas", version = "0.1.4" } +powdr-autoprecompiles = { path = "./autoprecompiles", version = "0.1.4" } +powdr-openvm = { path = "./openvm", version = "0.1.4" } [profile.pr-tests] inherits = "dev" diff --git a/ast/src/asm_analysis/display.rs b/ast/src/asm_analysis/display.rs index 389c8f9bc..21ef7bcba 100644 --- a/ast/src/asm_analysis/display.rs +++ b/ast/src/asm_analysis/display.rs @@ -138,9 +138,11 @@ impl Display for SubmachineDeclaration { "{} {}{}", self.ty, self.name, - (!self.args.is_empty()) - .then(|| format!("({})", self.args.iter().format(", "))) - .unwrap_or_default() + if !self.args.is_empty() { + format!("({})", self.args.iter().format(", ")) + } else { + Default::default() + } ) } } diff --git a/ast/src/asm_analysis/mod.rs b/ast/src/asm_analysis/mod.rs index 56565f450..007523e2a 100644 --- a/ast/src/asm_analysis/mod.rs +++ b/ast/src/asm_analysis/mod.rs @@ -5,7 +5,7 @@ use std::{ btree_map::{IntoIter, Iter, IterMut}, BTreeMap, BTreeSet, HashSet, }, - iter::{once, repeat}, + iter::{once, repeat_n}, ops::ControlFlow, }; @@ -164,13 +164,13 @@ impl FunctionStatements { pub fn iter_batches(&self) -> impl Iterator { match &self.batches { Some(batches) => Either::Left(batches.iter()), - None => Either::Right( - repeat(&BatchMetadata { + None => Either::Right(repeat_n( + &BatchMetadata { size: 1, reason: None, - }) - .take(self.inner.len()), - ), + }, + self.inner.len(), + )), } .scan(0, move |start, batch| { let res = BatchRef { @@ -191,13 +191,13 @@ impl FunctionStatements { match self.batches { Some(batches) => Either::Left(batches.into_iter()), - None => Either::Right( - repeat(BatchMetadata { + None => Either::Right(repeat_n( + BatchMetadata { size: 1, reason: None, - }) - .take(len), - ), + }, + len, + )), } .map(move |batch| Batch { reason: batch.reason, diff --git a/ast/src/parsed/asm.rs b/ast/src/parsed/asm.rs index 4cd897eec..635e9c59a 100644 --- a/ast/src/parsed/asm.rs +++ b/ast/src/parsed/asm.rs @@ -1,6 +1,6 @@ use std::{ fmt::{Display, Formatter}, - iter::{empty, once, repeat}, + iter::{empty, once, repeat_n}, str::FromStr, }; @@ -52,6 +52,7 @@ pub struct SymbolDefinition { pub value: SymbolValue, } +#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, PartialEq, Eq, From)] pub enum SymbolValue { /// A machine definition @@ -304,17 +305,19 @@ impl AbsoluteSymbolPath { let common_prefix_len = self.common_prefix(base).parts.len(); // Start with max(0, base.parts.len() - common_root.parts.len()) // repetitions of "super". - let parts = repeat(Part::Super) - .take(base.parts.len().saturating_sub(common_prefix_len)) - // append the parts of self after the common root. - .chain( - self.parts - .iter() - .skip(common_prefix_len) - .cloned() - .map(Part::Named), - ) - .collect(); + let parts = repeat_n( + Part::Super, + base.parts.len().saturating_sub(common_prefix_len), + ) + // append the parts of self after the common root. 
+ .chain( + self.parts + .iter() + .skip(common_prefix_len) + .cloned() + .map(Part::Named), + ) + .collect(); SymbolPath { parts } } diff --git a/autoprecompiles/src/powdr.rs b/autoprecompiles/src/powdr.rs index de1686d30..6f0181d69 100644 --- a/autoprecompiles/src/powdr.rs +++ b/autoprecompiles/src/powdr.rs @@ -25,7 +25,7 @@ pub fn substitute_algebraic( expr: &mut AlgebraicExpression, sub: &BTreeMap>, ) { - expr.visit_expressions_mut( + let _ = expr.visit_expressions_mut( &mut |expr| { if let AlgebraicExpression::Reference(r) = expr { if let Some(sub_expr) = sub.get(&Column::from(&*r)) { @@ -40,7 +40,7 @@ pub fn substitute_algebraic( pub fn make_refs_zero(expr: &mut AlgebraicExpression) { let zero = AlgebraicExpression::Number(T::zero()); - expr.visit_expressions_mut( + let _ = expr.visit_expressions_mut( &mut |expr| { if let AlgebraicExpression::Reference(AlgebraicReference { .. }) = expr { *expr = zero.clone(); @@ -63,7 +63,7 @@ pub fn has_ref( r: &AlgebraicExpression, ) -> bool { let mut seen = false; - expr.visit_expressions( + let _ = expr.visit_expressions( &mut |expr| { if expr == r { seen = true; @@ -85,7 +85,7 @@ pub fn substitute_algebraic_algebraic( expr: &mut AlgebraicExpression, sub: &BTreeMap, AlgebraicExpression>, ) { - expr.visit_expressions_mut( + let _ = expr.visit_expressions_mut( &mut |expr| { if let Some(sub_expr) = sub.get(expr) { *expr = sub_expr.clone(); @@ -101,7 +101,7 @@ pub fn collect_cols_algebraic( expr: &AlgebraicExpression, ) -> BTreeSet> { let mut cols: BTreeSet> = Default::default(); - expr.visit_expressions( + let _ = expr.visit_expressions( &mut |expr| { if let AlgebraicExpression::Reference(AlgebraicReference { poly_id: @@ -190,7 +190,7 @@ pub fn reassign_ids( .collect(); // Update the machine with the new global column names - machine.visit_expressions_mut( + let _ = machine.visit_expressions_mut( &mut |e| { if let AlgebraicExpression::Reference(r) = e { let new_col = subs.get(&Column::from(&*r)).unwrap().clone(); @@ -221,7 +221,7 @@ pub fn reassign_ids( } pub fn substitute(expr: &mut Expression, sub: &BTreeMap) { - expr.visit_expressions_mut( + let _ = expr.visit_expressions_mut( &mut |expr| { match expr { Expression::Reference(_, ref mut r) => { diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 0b9d891d5..956517456 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -63,7 +63,7 @@ p3-commit = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf2 ], optional = true } p3-matrix = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true } p3-uni-stark = { git = "https://github.com/plonky3/Plonky3.git", rev = "2192432ddf28e7359dd2c577447886463e6124f0", optional = true } -stwo-prover = { git = "https://github.com/starkware-libs/stwo.git", optional = true, rev = "81d1fe349b490089f65723ad49ef72b9d09495ba", features = [ +stwo-prover = { git = "https://github.com/starkware-libs/stwo.git", optional = true, rev = "c26d2ab", features = [ "parallel", ] } diff --git a/backend/src/mock/connection_constraint_checker.rs b/backend/src/mock/connection_constraint_checker.rs index 1a6f0f75d..4acc5136f 100644 --- a/backend/src/mock/connection_constraint_checker.rs +++ b/backend/src/mock/connection_constraint_checker.rs @@ -159,16 +159,18 @@ impl<'a, F: FieldElement> ConnectionConstraintChecker<'a, F> { .flatten() .collect::>(); - (!errors.is_empty()) - .then(|| { + if !errors.is_empty() { + { let error = Errors { connection_count: self.connections.len(), errors, }; 
log::error!("{error}"); Err(error) - }) - .unwrap_or(Ok(())) + } + } else { + Ok(()) + } } /// Checks a single connection. diff --git a/backend/src/stwo/prover.rs b/backend/src/stwo/prover.rs index 806d82dd9..c643cf2bd 100644 --- a/backend/src/stwo/prover.rs +++ b/backend/src/stwo/prover.rs @@ -13,7 +13,7 @@ use tracing::{span, Level}; extern crate alloc; use alloc::collections::{btree_map::BTreeMap, btree_set::BTreeSet}; -use std::iter::repeat; +use std::iter::repeat_n; use std::marker::PhantomData; use std::sync::Arc; use std::{fmt, io}; @@ -571,7 +571,8 @@ where let constant_col_log_sizes = iter .clone() .flat_map(|(pil, machine_log_size)| { - repeat(machine_log_size).take( + repeat_n( + machine_log_size, pil.constant_count() + get_constant_with_next_list(pil).len() + pil.publics_count(), @@ -582,14 +583,14 @@ where let stage0_witness_col_log_sizes = iter .clone() .flat_map(|(pil, machine_log_size)| { - repeat(machine_log_size).take(pil.stage_commitment_count(0)) + repeat_n(machine_log_size, pil.stage_commitment_count(0)) }) .collect_vec(); let stage1_witness_col_log_sizes = iter .clone() .flat_map(|(pil, machine_log_size)| { - repeat(machine_log_size).take(pil.stage_commitment_count(1)) + repeat_n(machine_log_size, pil.stage_commitment_count(1)) }) .collect_vec(); diff --git a/cargo-powdr/template/rust-toolchain.toml b/cargo-powdr/template/rust-toolchain.toml index 690b698f9..7ee67e848 100644 --- a/cargo-powdr/template/rust-toolchain.toml +++ b/cargo-powdr/template/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2024-12-17" +channel = "nightly-2025-05-14" diff --git a/cli-openvm/Cargo.toml b/cli-openvm/Cargo.toml new file mode 100644 index 000000000..e6c19db96 --- /dev/null +++ b/cli-openvm/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "cli-openvm" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[[bin]] +name = "powdr_openvm" +path = "src/main.rs" +bench = false # See https://github.com/bheisler/criterion.rs/issues/458 + +[dependencies] +openvm-sdk = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-stark-sdk = { git = "https://github.com/powdr-labs/stark-backend.git", rev = "dacb25f", default-features = false } +openvm-stark-backend = { git = "https://github.com/powdr-labs/stark-backend.git", rev = "dacb25f", default-features = false } + +powdr-openvm.workspace = true + +eyre = "0.6.12" +tracing = "0.1.40" + +clap = { version = "^4.3", features = ["derive"] } + +serde_cbor = "0.11.2" + +[lints] +workspace = true diff --git a/cli-openvm/README.md b/cli-openvm/README.md new file mode 100644 index 000000000..b53cd3467 --- /dev/null +++ b/cli-openvm/README.md @@ -0,0 +1,30 @@ +# cli-openvm + +Use command `execute` to run the program only, and `prove` to prove. +The `prove` command has a `mock` option to only check the constraints. 
+ +Examples: + +```sh +# Run the original program +RUSTFLAGS='-C target-cpu=native' cargo run -r execute guest +# Prove the original program +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest +# Check the constraints and witness of the original program +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest --mock +# Run the program with autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r execute guest --skip 37 --autoprecompiles 1 +# Run the program with optimized autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r execute guest --skip 37 --autoprecompiles 1 --optimize +# Prove the program with autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest --skip 37 --autoprecompiles 1 +# Prove the program with optimized autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest --skip 37 --autoprecompiles 1 --optimize +# Check the constraints and witness of the program with autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest --skip 37 --autoprecompiles 1 --mock +# Check the constraints and witness of the program with optimized autoprecompiles +RUSTFLAGS='-C target-cpu=native' cargo run -r prove guest --skip 37 --autoprecompiles 1 --mock --optimize +``` + +It is recommended to use at least `RUST_LOG=info` for information, and `RUST_LOG=debug` for benchmarks. + diff --git a/cli-openvm/src/main.rs b/cli-openvm/src/main.rs new file mode 100644 index 000000000..84924bc6e --- /dev/null +++ b/cli-openvm/src/main.rs @@ -0,0 +1,170 @@ +use eyre::Result; +use openvm_sdk::StdIn; +use openvm_stark_backend::p3_field::PrimeField32; +use openvm_stark_sdk::config::setup_tracing_with_log_level; +use powdr_openvm::CompiledProgram; + +use clap::{CommandFactory, Parser, Subcommand}; +use std::io; +use tracing::Level; + +#[derive(Parser)] +#[command(name = "powdr-openvm", author, version, about, long_about = None)] +struct Cli { + #[command(subcommand)] + command: Option, +} + +#[derive(Subcommand)] +enum Commands { + Compile { + guest: String, + + #[arg(long, default_value_t = 0)] + autoprecompiles: usize, + + #[arg(long, default_value_t = 0)] + skip: usize, + + #[arg(long, default_value_t = false)] + pgo: bool, + + #[arg(long)] + input: Option, + }, + + Execute { + guest: String, + + #[arg(long, default_value_t = 0)] + autoprecompiles: usize, + + #[arg(long, default_value_t = 0)] + skip: usize, + + #[arg(long, default_value_t = false)] + pgo: bool, + + #[arg(long)] + input: Option, + }, + + Pgo { + guest: String, + + #[arg(long)] + input: Option, + }, + + Prove { + guest: String, + + #[arg(long, default_value_t = 0)] + autoprecompiles: usize, + + #[arg(long, default_value_t = 0)] + skip: usize, + + #[arg(long)] + #[arg(default_value_t = false)] + mock: bool, + + #[arg(long)] + #[arg(default_value_t = false)] + recursion: bool, + + #[arg(long, default_value_t = false)] + pgo: bool, + + #[arg(long)] + input: Option, + }, +} + +fn main() -> Result<(), io::Error> { + let args = Cli::parse(); + + setup_tracing_with_log_level(Level::WARN); + + if let Some(command) = args.command { + run_command(command); + Ok(()) + } else { + Cli::command().print_help() + } +} + +fn run_command(command: Commands) { + match command { + Commands::Compile { + guest, + autoprecompiles, + skip, + pgo, + input, + } => { + let pc_idx_count = + pgo.then(|| powdr_openvm::get_pc_idx_count(&guest, stdin_from(input))); + let program = + powdr_openvm::compile_guest(&guest, autoprecompiles, skip, pc_idx_count).unwrap(); + write_program_to_file(program, 
&format!("{guest}_compiled.cbor")).unwrap(); + } + + Commands::Execute { + guest, + autoprecompiles, + skip, + pgo, + input, + } => { + let pc_idx_count = + pgo.then(|| powdr_openvm::get_pc_idx_count(&guest, stdin_from(input))); + let program = + powdr_openvm::compile_guest(&guest, autoprecompiles, skip, pc_idx_count).unwrap(); + powdr_openvm::execute(program, stdin_from(input)).unwrap(); + } + + Commands::Prove { + guest, + autoprecompiles, + skip, + mock, + recursion, + pgo, + input, + } => { + let pc_idx_count = + pgo.then(|| powdr_openvm::get_pc_idx_count(&guest, stdin_from(input))); + let program = + powdr_openvm::compile_guest(&guest, autoprecompiles, skip, pc_idx_count).unwrap(); + powdr_openvm::prove(&program, mock, recursion, stdin_from(input)).unwrap(); + } + + // Run Pgo on the original openvm program (without powdr extension) + // By default, Compile, Execute, and Prove all run Pgo first + // This command is only used to test the powdr_openvm::pgo API + Commands::Pgo { guest, input } => { + let program = powdr_openvm::compile_openvm(&guest).unwrap(); + powdr_openvm::pgo(program, stdin_from(input)).unwrap(); + } + } +} + +fn write_program_to_file( + program: CompiledProgram, + filename: &str, +) -> Result<(), io::Error> { + use std::fs::File; + + let mut file = File::create(filename)?; + serde_cbor::to_writer(&mut file, &program).map_err(io::Error::other)?; + Ok(()) +} + +fn stdin_from(input: Option) -> StdIn { + let mut s = StdIn::default(); + if let Some(i) = input { + s.write(&i) + } + s +} diff --git a/cli-rs/src/main.rs b/cli-rs/src/main.rs index fabfc92f0..7dac6feed 100644 --- a/cli-rs/src/main.rs +++ b/cli-rs/src/main.rs @@ -64,7 +64,7 @@ struct Cli { #[derive(Subcommand)] enum Commands { /// Compile rust code to Powdr assembly. - /// Needs `rustup component add rust-src --toolchain nightly-2024-08-01`. + /// Needs `rustup component add rust-src --toolchain nightly-2025-05-14`. Compile { /// input rust code, points to a crate dir or its Cargo.toml file file: String, @@ -409,8 +409,8 @@ fn execute_fast( ); let duration = start.elapsed(); - log::info!("Executor done in: {:?}", duration); - log::info!("Execution trace length: {}", trace_len); + log::info!("Executor done in: {duration:?}"); + log::info!("Execution trace length: {trace_len}"); Ok(()) } @@ -458,7 +458,7 @@ fn execute( ); let duration = start.elapsed(); - log::info!("Executor done in: {:?}", duration); + log::info!("Executor done in: {duration:?}"); log::info!("Execution trace length: {}", execution.trace_len); let witness_cols: Vec<_> = pil diff --git a/examples/fibonacci/Cargo.toml b/examples/fibonacci/Cargo.toml deleted file mode 100644 index 9797bfe5d..000000000 --- a/examples/fibonacci/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "fibonacci" -version = "0.1.0" -edition = "2021" - -[features] -default = [] -simd = ["powdr/plonky3-simd"] - -[dependencies] -powdr = { git = "https://github.com/powdr-labs/powdr", tag = "v0.1.3", features = [ - "plonky3", -] } - -env_logger = "0.10.2" -log = "0.4.27" - -[workspace] diff --git a/examples/fibonacci/README.md b/examples/fibonacci/README.md deleted file mode 100644 index 3397b95a6..000000000 --- a/examples/fibonacci/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# powdrVM Fibonacci example - -This example computes Fibonacci in the guest and demonstrates -the case when the execution trace is split into multiple chunks. 
- -If you're curious about what happens internally, run: -```console -RUST_LOG=info cargo run -r -``` diff --git a/examples/fibonacci/guest/Cargo.toml b/examples/fibonacci/guest/Cargo.toml deleted file mode 100644 index bc04c62b1..000000000 --- a/examples/fibonacci/guest/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "powdr-guest" -version = "0.1.0" -edition = "2021" - -[dependencies] -powdr-riscv-runtime = { git = "https://github.com/powdr-labs/powdr", tag = "v0.1.3", features = [ - "std", -] } - -[workspace] diff --git a/examples/fibonacci/guest/src/main.rs b/examples/fibonacci/guest/src/main.rs deleted file mode 100644 index f094c6f8f..000000000 --- a/examples/fibonacci/guest/src/main.rs +++ /dev/null @@ -1,20 +0,0 @@ -use powdr_riscv_runtime; -use powdr_riscv_runtime::commit; -use powdr_riscv_runtime::io::{read, write}; - -fn fib(n: u32) -> u32 { - if n <= 1 { - return n; - } - fib(n - 1) + fib(n - 2) -} - -fn main() { - // Read input from stdin. - let n: u32 = read(0); - let r = fib(n); - // Write result to stdout. - write(1, r); - // Commit the result as a public. - commit::commit(r); -} diff --git a/examples/fibonacci/rust-toolchain.toml b/examples/fibonacci/rust-toolchain.toml deleted file mode 100644 index 690b698f9..000000000 --- a/examples/fibonacci/rust-toolchain.toml +++ /dev/null @@ -1,2 +0,0 @@ -[toolchain] -channel = "nightly-2024-12-17" diff --git a/examples/fibonacci/src/main.rs b/examples/fibonacci/src/main.rs deleted file mode 100644 index 2da28fa1c..000000000 --- a/examples/fibonacci/src/main.rs +++ /dev/null @@ -1,19 +0,0 @@ -use powdr::Session; - -fn main() { - env_logger::init(); - - let n = 22; - let mut session = Session::builder() - .guest_path("./guest") - .out_path("powdr-target") - .chunk_size_log2(18) - .build() - // Compute Fibonacci of 21 in the guest. - .write(0, &n); - - // Fast dry run to test execution. - session.run(); - - session.prove(); -} diff --git a/examples/keccak/Cargo.toml b/examples/keccak/Cargo.toml deleted file mode 100644 index 3d3d795dd..000000000 --- a/examples/keccak/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "keccak" -version = "0.1.0" -edition = "2021" - -[features] -default = [] -simd = ["powdr/plonky3-simd"] - -[dependencies] -powdr = { git = "https://github.com/powdr-labs/powdr", tag = "v0.1.3", features = [ - "plonky3", -] } -hex = "0.4" - -env_logger = "0.10.2" -log = "0.4.27" - -[workspace] diff --git a/examples/keccak/README.md b/examples/keccak/README.md deleted file mode 100644 index 989ac3f59..000000000 --- a/examples/keccak/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# powdrVM tinykeccak example - -This example demonstrates how to use the `powdrVM` to run a simple -keccak hash function using the `tinykeccak` crate. - -We want to prove that we know the pre-image of a hash. -The host takes in a 32-byte challenge hash `C` as a hex string, -and the pre-image string `P`, such that `Keccak(P) = C`. - -The guest receives the same data from the host and verifies that the claim is true. 
- -For a valid hash example, you can run: - -```console -cargo run -r "56c73097b157bbe90a5b273a6bb93eb5e89ab1ac0364a73a4e107187c63f7256" "my powdr hash" -``` diff --git a/examples/keccak/guest/Cargo.toml b/examples/keccak/guest/Cargo.toml deleted file mode 100644 index 1d42ff379..000000000 --- a/examples/keccak/guest/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "powdr-guest" -version = "0.1.0" -edition = "2021" - -[dependencies] -powdr-riscv-runtime = { git = "https://github.com/powdr-labs/powdr", tag = "v0.1.3", features = [ - "std", -] } -tiny-keccak = { version = "2.0.2", features = ["keccak"] } - -[workspace] diff --git a/examples/keccak/guest/src/main.rs b/examples/keccak/guest/src/main.rs deleted file mode 100644 index 5efc46640..000000000 --- a/examples/keccak/guest/src/main.rs +++ /dev/null @@ -1,16 +0,0 @@ -use powdr_riscv_runtime; -use powdr_riscv_runtime::io::read; - -use tiny_keccak::{Hasher, Keccak}; - -fn main() { - let challenge: [u8; 32] = read(1); - let preimg: Vec = read(2); - - let mut output = [0u8; 32]; - let mut hasher = Keccak::v256(); - hasher.update(&preimg); - hasher.finalize(&mut output); - - assert_eq!(output, challenge); -} diff --git a/examples/keccak/rust-toolchain.toml b/examples/keccak/rust-toolchain.toml deleted file mode 100644 index 690b698f9..000000000 --- a/examples/keccak/rust-toolchain.toml +++ /dev/null @@ -1,2 +0,0 @@ -[toolchain] -channel = "nightly-2024-12-17" diff --git a/examples/keccak/src/main.rs b/examples/keccak/src/main.rs deleted file mode 100644 index ae3e16cbc..000000000 --- a/examples/keccak/src/main.rs +++ /dev/null @@ -1,45 +0,0 @@ -use powdr::Session; - -use hex::FromHex; -use std::convert::TryInto; -use std::env; - -fn main() { - env_logger::init(); - - let args: Vec = env::args().collect(); - - if args.len() < 3 { - panic!("Please provide two arguments: "); - } - - let challenge = parse_hash(&args[1]); - let preimg = args[2].clone().into_bytes(); - - let mut session = Session::builder() - .guest_path("./guest") - .out_path("powdr-target") - .chunk_size_log2(18) - .build() - .write(1, &challenge) - .write(2, &preimg); - - // Fast dry run to test execution. - session.run(); - - // Uncomment to compute the proof. 
- //session.prove(); -} - -fn parse_hash(s: &str) -> [u8; 32] { - match Vec::from_hex(s) { - Ok(bytes) => { - if bytes.len() == 32 { - bytes.try_into().expect("length checked to be 32") - } else { - panic!("The pre-image must be exactly 64 hex characters (32 bytes)."); - } - } - Err(e) => panic!("Error parsing the pre-image as hex: {e}"), - } -} diff --git a/executor/src/witgen/data_structures/padded_bitvec.rs b/executor/src/witgen/data_structures/padded_bitvec.rs index 7dc93730d..49d399dda 100644 --- a/executor/src/witgen/data_structures/padded_bitvec.rs +++ b/executor/src/witgen/data_structures/padded_bitvec.rs @@ -14,7 +14,7 @@ pub struct PaddedBitVec { impl PaddedBitVec { pub fn new(bits_per_row: usize) -> Self { - let words_per_row = (bits_per_row + 31) / 32; + let words_per_row = bits_per_row.div_ceil(32); Self { data: Vec::new(), bits_per_row, diff --git a/executor/src/witgen/jit/processor.rs b/executor/src/witgen/jit/processor.rs index becab1328..bbab56534 100644 --- a/executor/src/witgen/jit/processor.rs +++ b/executor/src/witgen/jit/processor.rs @@ -85,6 +85,7 @@ impl<'a, T: FieldElement> Processor<'a, T> { self } + #[allow(clippy::result_large_err)] pub fn generate_code>( self, can_process: impl CanProcessCall, @@ -125,6 +126,7 @@ impl<'a, T: FieldElement> Processor<'a, T> { self.generate_code_for_branch(can_process, witgen, identity_queue, branch_depth) } + #[allow(clippy::result_large_err)] fn generate_code_for_branch>( &self, can_process: impl CanProcessCall, diff --git a/executor/src/witgen/machines/mod.rs b/executor/src/witgen/machines/mod.rs index 68685a1b5..fb0d98f54 100644 --- a/executor/src/witgen/machines/mod.rs +++ b/executor/src/witgen/machines/mod.rs @@ -168,6 +168,7 @@ impl LookupCell<'_, T> { /// All known implementations of [Machine]. /// We cannot use Box> because the trait is not object-safe, /// since it has generic methods. +#[allow(clippy::large_enum_variant)] pub enum KnownMachine<'a, T: FieldElement> { SecondStageMachine(SecondStageMachine<'a, T>), SortedWitnesses(SortedWitnesses<'a, T>), diff --git a/openvm/.gitignore b/openvm/.gitignore new file mode 100644 index 000000000..223d1d45f --- /dev/null +++ b/openvm/.gitignore @@ -0,0 +1,24 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +#Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +# RustRover +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +guest/openvm +guest-keccak/target \ No newline at end of file diff --git a/openvm/Cargo.toml b/openvm/Cargo.toml new file mode 100644 index 000000000..ff54c8cc2 --- /dev/null +++ b/openvm/Cargo.toml @@ -0,0 +1,88 @@ +[package] +name = "powdr-openvm" +version.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +openvm = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-build = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-rv32im-circuit = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-rv32im-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-rv32im-guest = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049", default-features = false } +openvm-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-circuit = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-circuit-derive = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-circuit-primitives = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-circuit-primitives-derive = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-instructions = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-instructions-derive = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-sdk = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-ecc-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-keccak256-circuit = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-keccak256-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-sha256-circuit = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-sha256-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-algebra-transpiler = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049" } +openvm-native-circuit = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049", default-features = false } +openvm-native-recursion = { git = "https://github.com/powdr-labs/openvm.git", rev = "2868049", default-features = false } + +openvm-stark-sdk = { git = "https://github.com/powdr-labs/stark-backend.git", rev = "dacb25f", default-features = false } +openvm-stark-backend = { git = "https://github.com/powdr-labs/stark-backend.git", rev = "dacb25f", default-features = false } + +powdr.workspace = true +powdr-autoprecompiles.workspace = true +powdr-constraint-solver.workspace = true + +eyre = "0.6.12" +serde = "1.0.217" +derive_more = { version = "2.0.1", default-features = false, features = [ + "from", +] } +strum = "0.26.3" +itertools = "0.14.0" + +tracing = "0.1.40" +tracing-subscriber = { version = "0.3.17", features = ["std", "env-filter"] } + +clap = { version = "^4.3", features = ["derive"] } + +log = "0.4.17" +serde_cbor = "0.11.2" + +[dev-dependencies] +test-log = { version = "0.2.17", features = ["trace"] } + +[lib] +bench = false # See https://github.com/bheisler/criterion.rs/issues/458 + +# Uncomment both patches below for local stark-backend and openvm. +# The local openvm also needs to have stark-backend patched so all types match. 
+#[patch."https://github.com/powdr-labs/stark-backend.git"] +#openvm-stark-sdk = { path = "../../stark-backend/crates/stark-sdk", default-features = false } +#openvm-stark-backend = { path = "../../stark-backend/crates/stark-backend", default-features = false } +# +#[patch."https://github.com/powdr-labs/openvm.git"] +#openvm-transpiler = { path = "../../openvm/crates/toolchain/transpiler" } +#openvm = { path = "../../openvm/crates/toolchain/openvm" } +#openvm-build = { path = "../../openvm/crates/toolchain/build" } +#openvm-rv32im-circuit = { path = "../../openvm/extensions/rv32im/circuit/" } +#openvm-rv32im-transpiler = { path = "../../openvm/extensions/rv32im/transpiler" } +#openvm-rv32im-guest = { path = "../../openvm/extensions/rv32im/guest" } +#openvm-circuit = { path = "../../openvm/crates/vm" } +#openvm-circuit-derive = { path = "../../openvm/crates/vm/derive" } +#openvm-circuit-primitives = { path = "../../openvm/crates/circuits/primitives" } +#openvm-circuit-primitives-derive = { path = "../../openvm/crates/circuits/primitives/derive" } +#openvm-instructions = { path = "../../openvm/crates/toolchain/instructions" } +#openvm-instructions-derive = { path = "../../openvm/crates/toolchain/instructions/derive" } +#openvm-sdk = { path = "../../openvm/crates/sdk" } +#openvm-ecc-transpiler = { path = "../../openvm/extensions/ecc/transpiler" } +#openvm-keccak256-circuit = { path = "../../openvm/extensions/keccak256/circuit" } +#openvm-keccak256-transpiler = { path = "../../openvm/extensions/keccak256/transpiler" } +#openvm-sha256-circuit = { path = "../../openvm/extensions/sha256/circuit" } +#openvm-sha256-transpiler = { path = "../../openvm/extensions/sha256/transpiler" } +#openvm-algebra-transpiler = { path = "../../openvm/extensions/algebra/transpiler" } +#openvm-native-circuit = { path = "../../openvm/extensions/native/circuit" } +#openvm-native-recursion = { path = "../../openvm/extensions/native/recursion" } diff --git a/openvm/guest-keccak-manual-precompile/Cargo.toml b/openvm/guest-keccak-manual-precompile/Cargo.toml new file mode 100644 index 000000000..dfeb4c2f4 --- /dev/null +++ b/openvm/guest-keccak-manual-precompile/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "keccak-example" +version = "0.0.0" +edition = "2021" + +[workspace] +members = [] + +[dependencies] +openvm = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } +openvm-platform = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } +openvm-keccak256-guest = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } + +[features] +default = [] +std = [ + "openvm/std", +] diff --git a/openvm/guest-keccak-manual-precompile/openvm.toml b/openvm/guest-keccak-manual-precompile/openvm.toml new file mode 100644 index 000000000..90e80fa97 --- /dev/null +++ b/openvm/guest-keccak-manual-precompile/openvm.toml @@ -0,0 +1,4 @@ +[app_vm_config.rv32i] +[app_vm_config.rv32m] +[app_vm_config.io] +[app_vm_config.keccak] \ No newline at end of file diff --git a/openvm/guest-keccak-manual-precompile/src/main.rs b/openvm/guest-keccak-manual-precompile/src/main.rs new file mode 100644 index 000000000..3bb53f38e --- /dev/null +++ b/openvm/guest-keccak-manual-precompile/src/main.rs @@ -0,0 +1,22 @@ +#![cfg_attr(not(feature = "std"), no_main)] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +use core::hint::black_box; + +use openvm::io::reveal; +use openvm_keccak256_guest::set_keccak256; + +openvm::entry!(main); + +const N: usize = 5_000; + +pub fn main() { + let mut 
output = [0u8; 32]; + for _ in 0..N { + set_keccak256(&black_box(output), &mut output); + } + + reveal(output[0] as u32, 0); +} diff --git a/openvm/guest-keccak/Cargo.toml b/openvm/guest-keccak/Cargo.toml new file mode 100644 index 000000000..520379bad --- /dev/null +++ b/openvm/guest-keccak/Cargo.toml @@ -0,0 +1,13 @@ +[workspace] +[package] +name = "guest-keccak-stdin" +version = "0.0.0" +edition = "2021" + +[dependencies] +openvm = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } +tiny-keccak = { version = "2.0.2", features = ["keccak"] } + +[profile.release-with-debug] +inherits = "release" +debug = true diff --git a/openvm/guest-keccak/src/main.rs b/openvm/guest-keccak/src/main.rs new file mode 100644 index 000000000..831141a3c --- /dev/null +++ b/openvm/guest-keccak/src/main.rs @@ -0,0 +1,21 @@ +#![cfg_attr(target_os = "zkvm", no_main)] +#![cfg_attr(target_os = "zkvm", no_std)] + +openvm::entry!(main); + +use core::hint::black_box; + +use openvm::io::{reveal, read}; +use tiny_keccak::{Hasher, Keccak}; + +pub fn main() { + let n: u32 = read(); + let mut output = black_box([0u8; 32]); + for _ in 0..n { + let mut hasher = Keccak::v256(); + hasher.update(&output); + hasher.finalize(&mut output); + } + + reveal(output[0] as u32, 0); +} diff --git a/openvm/guest-matmul/Cargo.toml b/openvm/guest-matmul/Cargo.toml new file mode 100644 index 000000000..11e8644e4 --- /dev/null +++ b/openvm/guest-matmul/Cargo.toml @@ -0,0 +1,12 @@ +[workspace] +[package] +name = "powdr-openvm-matmul-test" +version = "0.0.0" +edition = "2021" + +[dependencies] +openvm = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } + +[profile.release-with-debug] +inherits = "release" +debug = true diff --git a/openvm/guest-matmul/src/main.rs b/openvm/guest-matmul/src/main.rs new file mode 100644 index 000000000..74e6112da --- /dev/null +++ b/openvm/guest-matmul/src/main.rs @@ -0,0 +1,275 @@ +#![cfg_attr(target_os = "zkvm", no_main)] +#![cfg_attr(target_os = "zkvm", no_std)] + +openvm::entry!(main); + +use openvm::io::reveal; + +pub fn main() { + loop_test_matrix(); +} + +const SIZE: usize = 6; + +type Mat = [[i32; SIZE]; SIZE]; + +#[inline(never)] +fn matrix_multiply_unrolled(a: &Mat, b: &Mat, c: &mut Mat) { + c[0][0] = a[0][0] * b[0][0] + + a[0][1] * b[1][0] + + a[0][2] * b[2][0] + + a[0][3] * b[3][0] + + a[0][4] * b[4][0] + + a[0][5] * b[5][0]; + c[0][1] = a[0][0] * b[0][1] + + a[0][1] * b[1][1] + + a[0][2] * b[2][1] + + a[0][3] * b[3][1] + + a[0][4] * b[4][1] + + a[0][5] * b[5][1]; + c[0][2] = a[0][0] * b[0][2] + + a[0][1] * b[1][2] + + a[0][2] * b[2][2] + + a[0][3] * b[3][2] + + a[0][4] * b[4][2] + + a[0][5] * b[5][2]; + c[0][3] = a[0][0] * b[0][3] + + a[0][1] * b[1][3] + + a[0][2] * b[2][3] + + a[0][3] * b[3][3] + + a[0][4] * b[4][3] + + a[0][5] * b[5][3]; + c[0][4] = a[0][0] * b[0][4] + + a[0][1] * b[1][4] + + a[0][2] * b[2][4] + + a[0][3] * b[3][4] + + a[0][4] * b[4][4] + + a[0][5] * b[5][4]; + c[0][5] = a[0][0] * b[0][5] + + a[0][1] * b[1][5] + + a[0][2] * b[2][5] + + a[0][3] * b[3][5] + + a[0][4] * b[4][5] + + a[0][5] * b[5][5]; + + c[1][0] = a[1][0] * b[0][0] + + a[1][1] * b[1][0] + + a[1][2] * b[2][0] + + a[1][3] * b[3][0] + + a[1][4] * b[4][0] + + a[1][5] * b[5][0]; + c[1][1] = a[1][0] * b[0][1] + + a[1][1] * b[1][1] + + a[1][2] * b[2][1] + + a[1][3] * b[3][1] + + a[1][4] * b[4][1] + + a[1][5] * b[5][1]; + c[1][2] = a[1][0] * b[0][2] + + a[1][1] * b[1][2] + + a[1][2] * b[2][2] + + a[1][3] * b[3][2] + + a[1][4] * b[4][2] + + a[1][5] * b[5][2]; + c[1][3] = a[1][0] * 
b[0][3] + + a[1][1] * b[1][3] + + a[1][2] * b[2][3] + + a[1][3] * b[3][3] + + a[1][4] * b[4][3] + + a[1][5] * b[5][3]; + c[1][4] = a[1][0] * b[0][4] + + a[1][1] * b[1][4] + + a[1][2] * b[2][4] + + a[1][3] * b[3][4] + + a[1][4] * b[4][4] + + a[1][5] * b[5][4]; + c[1][5] = a[1][0] * b[0][5] + + a[1][1] * b[1][5] + + a[1][2] * b[2][5] + + a[1][3] * b[3][5] + + a[1][4] * b[4][5] + + a[1][5] * b[5][5]; + + c[2][0] = a[2][0] * b[0][0] + + a[2][1] * b[1][0] + + a[2][2] * b[2][0] + + a[2][3] * b[3][0] + + a[2][4] * b[4][0] + + a[2][5] * b[5][0]; + c[2][1] = a[2][0] * b[0][1] + + a[2][1] * b[1][1] + + a[2][2] * b[2][1] + + a[2][3] * b[3][1] + + a[2][4] * b[4][1] + + a[2][5] * b[5][1]; + c[2][2] = a[2][0] * b[0][2] + + a[2][1] * b[1][2] + + a[2][2] * b[2][2] + + a[2][3] * b[3][2] + + a[2][4] * b[4][2] + + a[2][5] * b[5][2]; + c[2][3] = a[2][0] * b[0][3] + + a[2][1] * b[1][3] + + a[2][2] * b[2][3] + + a[2][3] * b[3][3] + + a[2][4] * b[4][3] + + a[2][5] * b[5][3]; + c[2][4] = a[2][0] * b[0][4] + + a[2][1] * b[1][4] + + a[2][2] * b[2][4] + + a[2][3] * b[3][4] + + a[2][4] * b[4][4] + + a[2][5] * b[5][4]; + c[2][5] = a[2][0] * b[0][5] + + a[2][1] * b[1][5] + + a[2][2] * b[2][5] + + a[2][3] * b[3][5] + + a[2][4] * b[4][5] + + a[2][5] * b[5][5]; + + c[3][0] = a[3][0] * b[0][0] + + a[3][1] * b[1][0] + + a[3][2] * b[2][0] + + a[3][3] * b[3][0] + + a[3][4] * b[4][0] + + a[3][5] * b[5][0]; + c[3][1] = a[3][0] * b[0][1] + + a[3][1] * b[1][1] + + a[3][2] * b[2][1] + + a[3][3] * b[3][1] + + a[3][4] * b[4][1] + + a[3][5] * b[5][1]; + c[3][2] = a[3][0] * b[0][2] + + a[3][1] * b[1][2] + + a[3][2] * b[2][2] + + a[3][3] * b[3][2] + + a[3][4] * b[4][2] + + a[3][5] * b[5][2]; + c[3][3] = a[3][0] * b[0][3] + + a[3][1] * b[1][3] + + a[3][2] * b[2][3] + + a[3][3] * b[3][3] + + a[3][4] * b[4][3] + + a[3][5] * b[5][3]; + c[3][4] = a[3][0] * b[0][4] + + a[3][1] * b[1][4] + + a[3][2] * b[2][4] + + a[3][3] * b[3][4] + + a[3][4] * b[4][4] + + a[3][5] * b[5][4]; + c[3][5] = a[3][0] * b[0][5] + + a[3][1] * b[1][5] + + a[3][2] * b[2][5] + + a[3][3] * b[3][5] + + a[3][4] * b[4][5] + + a[3][5] * b[5][5]; + + c[4][0] = a[4][0] * b[0][0] + + a[4][1] * b[1][0] + + a[4][2] * b[2][0] + + a[4][3] * b[3][0] + + a[4][4] * b[4][0] + + a[4][5] * b[5][0]; + c[4][1] = a[4][0] * b[0][1] + + a[4][1] * b[1][1] + + a[4][2] * b[2][1] + + a[4][3] * b[3][1] + + a[4][4] * b[4][1] + + a[4][5] * b[5][1]; + c[4][2] = a[4][0] * b[0][2] + + a[4][1] * b[1][2] + + a[4][2] * b[2][2] + + a[4][3] * b[3][2] + + a[4][4] * b[4][2] + + a[4][5] * b[5][2]; + c[4][3] = a[4][0] * b[0][3] + + a[4][1] * b[1][3] + + a[4][2] * b[2][3] + + a[4][3] * b[3][3] + + a[4][4] * b[4][3] + + a[4][5] * b[5][3]; + c[4][4] = a[4][0] * b[0][4] + + a[4][1] * b[1][4] + + a[4][2] * b[2][4] + + a[4][3] * b[3][4] + + a[4][4] * b[4][4] + + a[4][5] * b[5][4]; + c[4][5] = a[4][0] * b[0][5] + + a[4][1] * b[1][5] + + a[4][2] * b[2][5] + + a[4][3] * b[3][5] + + a[4][4] * b[4][5] + + a[4][5] * b[5][5]; + + c[5][0] = a[5][0] * b[0][0] + + a[5][1] * b[1][0] + + a[5][2] * b[2][0] + + a[5][3] * b[3][0] + + a[5][4] * b[4][0] + + a[5][5] * b[5][0]; + c[5][1] = a[5][0] * b[0][1] + + a[5][1] * b[1][1] + + a[5][2] * b[2][1] + + a[5][3] * b[3][1] + + a[5][4] * b[4][1] + + a[5][5] * b[5][1]; + c[5][2] = a[5][0] * b[0][2] + + a[5][1] * b[1][2] + + a[5][2] * b[2][2] + + a[5][3] * b[3][2] + + a[5][4] * b[4][2] + + a[5][5] * b[5][2]; + c[5][3] = a[5][0] * b[0][3] + + a[5][1] * b[1][3] + + a[5][2] * b[2][3] + + a[5][3] * b[3][3] + + a[5][4] * b[4][3] + + a[5][5] * b[5][3]; + c[5][4] = a[5][0] * b[0][4] + + a[5][1] * 
b[1][4] + + a[5][2] * b[2][4] + + a[5][3] * b[3][4] + + a[5][4] * b[4][4] + + a[5][5] * b[5][4]; + c[5][5] = a[5][0] * b[0][5] + + a[5][1] * b[1][5] + + a[5][2] * b[2][5] + + a[5][3] * b[3][5] + + a[5][4] * b[4][5] + + a[5][5] * b[5][5]; +} + +#[inline(never)] +fn test_matrix() { + let a: Mat = [ + [1, 2, 3, 4, 5, 6], + [7, 8, 9, 10, 11, 12], + [13, 14, 15, 16, 17, 18], + [19, 20, 21, 22, 23, 24], + [25, 26, 27, 28, 29, 30], + [31, 32, 33, 34, 35, 36], + ]; + + let b: Mat = [ + [37, 38, 39, 40, 41, 42], + [43, 44, 45, 46, 47, 48], + [49, 50, 51, 52, 53, 54], + [55, 56, 57, 58, 59, 60], + [61, 62, 63, 64, 65, 66], + [67, 68, 69, 70, 71, 72], + ]; + + let mut c: Mat = [[0; SIZE]; SIZE]; + + matrix_multiply_unrolled(&a, &b, &mut c); + + assert_eq!(c[0][0], 1197); + reveal(c[0][0] as u32, 0); + reveal(c[5][5] as u32, 1); +} + +#[inline(never)] +fn loop_test_matrix() { + for _ in 0..8000 { + test_matrix(); + } +} diff --git a/openvm/guest/Cargo.toml b/openvm/guest/Cargo.toml new file mode 100644 index 000000000..46a5dde05 --- /dev/null +++ b/openvm/guest/Cargo.toml @@ -0,0 +1,12 @@ +[workspace] +[package] +name = "powdr-openvm-guest-stdin-test" +version = "0.0.0" +edition = "2021" + +[dependencies] +openvm = { git = "https://github.com/powdr-labs/openvm.git", branch = "powdr" } + +[profile.release-with-debug] +inherits = "release" +debug = true diff --git a/openvm/guest/src/main.rs b/openvm/guest/src/main.rs new file mode 100644 index 000000000..1f76164ba --- /dev/null +++ b/openvm/guest/src/main.rs @@ -0,0 +1,22 @@ +#![cfg_attr(target_os = "zkvm", no_main)] +#![cfg_attr(target_os = "zkvm", no_std)] + +openvm::entry!(main); + +use openvm::io::{reveal, read}; + +pub fn main() { + let n: u32 = read(); + let mut a: u32 = 0; + let mut b: u32 = 1; + for _ in 1..n { + let sum = a + b; + a = b; + b = sum; + } + if a == 0 { + panic!(); + } + + reveal(a, 0); +} diff --git a/openvm/src/air_builder.rs b/openvm/src/air_builder.rs new file mode 100644 index 000000000..5e4c44cb2 --- /dev/null +++ b/openvm/src/air_builder.rs @@ -0,0 +1,88 @@ +use std::sync::Arc; + +use openvm_stark_backend::air_builders::symbolic::get_symbolic_builder; +use openvm_stark_backend::air_builders::symbolic::SymbolicRapBuilder; +use openvm_stark_backend::config::Com; +use openvm_stark_backend::config::StarkGenericConfig; +use openvm_stark_backend::config::Val; +use openvm_stark_backend::interaction::RapPhaseSeqKind; +use openvm_stark_backend::keygen::types::ProverOnlySinglePreprocessedData; +use openvm_stark_backend::keygen::types::TraceWidth; +use openvm_stark_backend::keygen::types::VerifierSinglePreprocessedData; +use openvm_stark_backend::p3_commit::Pcs; +use openvm_stark_backend::p3_matrix::Matrix; +use openvm_stark_backend::rap::AnyRap; + +pub struct PrepKeygenData { + pub _verifier_data: Option>>, + pub prover_data: Option>, +} + +pub struct AirKeygenBuilder { + air: Arc>, + prep_keygen_data: PrepKeygenData, +} + +fn compute_prep_data_for_air( + pcs: &SC::Pcs, + air: &dyn AnyRap, +) -> PrepKeygenData { + let preprocessed_trace = air.preprocessed_trace(); + let vpdata_opt = preprocessed_trace.map(|trace| { + let domain = pcs.natural_domain_for_degree(trace.height()); + let (commit, data) = pcs.commit(vec![(domain, trace.clone())]); + let vdata = VerifierSinglePreprocessedData { commit }; + let pdata = ProverOnlySinglePreprocessedData { + trace: Arc::new(trace), + data: Arc::new(data), + }; + (vdata, pdata) + }); + if let Some((vdata, pdata)) = vpdata_opt { + PrepKeygenData { + prover_data: Some(pdata), + _verifier_data: 
Some(vdata), + } + } else { + PrepKeygenData { + prover_data: None, + _verifier_data: None, + } + } +} + +impl AirKeygenBuilder { + pub fn new(pcs: &SC::Pcs, air: Arc>) -> Self { + let prep_keygen_data = compute_prep_data_for_air(pcs, air.as_ref()); + AirKeygenBuilder { + air, + prep_keygen_data, + } + } + + pub fn get_symbolic_builder( + &self, + max_constraint_degree: Option, + ) -> SymbolicRapBuilder> { + let width = TraceWidth { + preprocessed: self.prep_keygen_data.width(), + cached_mains: self.air.cached_main_widths(), + common_main: self.air.common_main_width(), + after_challenge: vec![], + }; + get_symbolic_builder( + self.air.as_ref(), + &width, + &[], + &[], + RapPhaseSeqKind::None, + max_constraint_degree.unwrap_or(0), + ) + } +} + +impl PrepKeygenData { + pub fn width(&self) -> Option { + self.prover_data.as_ref().map(|d| d.trace.width()) + } +} diff --git a/openvm/src/bus_interaction_handler/bitwise_lookup.rs b/openvm/src/bus_interaction_handler/bitwise_lookup.rs new file mode 100644 index 000000000..472991e57 --- /dev/null +++ b/openvm/src/bus_interaction_handler/bitwise_lookup.rs @@ -0,0 +1,151 @@ +use powdr::{FieldElement, LargeInt}; +use powdr_constraint_solver::range_constraint::RangeConstraint; + +use super::byte_constraint; + +pub fn handle_bitwise_lookup( + payload: &[RangeConstraint], +) -> Vec> { + // See: https://github.com/openvm-org/openvm/blob/v1.0.0/crates/circuits/primitives/src/bitwise_op_lookup/bus.rs + // Expects (x, y, z, op), where: + // - if op == 0, x & y are bytes, z = 0 + // - if op == 1, x & y are bytes, z = x ^ y + + let [x, y, _z, op] = payload else { + panic!("Expected arguments (x, y, z, op)"); + }; + match op + .try_to_single_value() + .map(|v| v.to_integer().try_into_u64().unwrap()) + { + // Range constraint on x & y, z = 0 + Some(0) => vec![ + byte_constraint(), + byte_constraint(), + RangeConstraint::from_value(T::zero()), + RangeConstraint::from_value(T::zero()), + ], + // z = x ^ y + Some(1) => { + if let (Some(x), Some(y)) = (x.try_to_single_value(), y.try_to_single_value()) { + // Both inputs are known, can compute result concretely + let z = T::from( + x.to_integer().try_into_u64().unwrap() ^ y.to_integer().try_into_u64().unwrap(), + ); + vec![ + RangeConstraint::from_value(x), + RangeConstraint::from_value(y), + RangeConstraint::from_value(z), + RangeConstraint::from_value(T::one()), + ] + } else { + // The result of an XOR can only be a byte and have bits set that are set in either x or y + let z_constraint = x.disjunction(y).conjunction(&byte_constraint()); + vec![ + byte_constraint(), + byte_constraint(), + z_constraint, + RangeConstraint::from_value(T::one()), + ] + } + } + // Operation is unknown, but we know that x, y, and z are bytes + // and that op is 0 or 1 + None => vec![ + byte_constraint(), + byte_constraint(), + byte_constraint(), + RangeConstraint::from_mask(0x1u64), + ], + _ => panic!("Invalid operation"), + } +} + +#[cfg(test)] +mod tests { + use crate::bus_interaction_handler::{ + test_utils::*, OpenVmBusInteractionHandler, BITWISE_LOOKUP, + }; + + use super::*; + use powdr::number::BabyBearField; + use powdr_constraint_solver::constraint_system::{BusInteraction, BusInteractionHandler}; + + fn run( + x: RangeConstraint, + y: RangeConstraint, + z: RangeConstraint, + op: RangeConstraint, + ) -> Vec> { + let handler = OpenVmBusInteractionHandler::::default(); + + let bus_interaction = BusInteraction { + bus_id: RangeConstraint::from_value(BITWISE_LOOKUP.into()), + multiplicity: value(1), + payload: vec![x, y, z, op], + }; 
+ let result = handler.handle_bus_interaction(bus_interaction); + result.payload + } + + #[test] + fn test_byte_constraint() { + let result = run(default(), default(), default(), value(0)); + + assert_eq!(result.len(), 4); + assert_eq!(result[0], mask(0xff)); + assert_eq!(result[1], mask(0xff)); + assert_eq!(result[2], value(0)); + assert_eq!(result[3], value(0)); + } + + #[test] + fn test_xor_known() { + let result = run(value(0b10101010), value(0b11001100), default(), value(1)); + + assert_eq!(result.len(), 4); + assert_eq!(result[0], value(0b10101010)); + assert_eq!(result[1], value(0b11001100)); + assert_eq!(result[2], value(0b01100110)); + assert_eq!(result[3], value(1)); + } + + #[test] + fn test_xor_unknown() { + let result = run(default(), default(), default(), value(1)); + + assert_eq!(result.len(), 4); + assert_eq!(result[0], mask(0xff)); + assert_eq!(result[1], mask(0xff)); + assert_eq!(result[2], mask(0xff)); + assert_eq!(result[3], value(1)); + } + + #[test] + fn test_xor_one_unknown() { + let result = run(mask(0xabcd), value(0), default(), value(1)); + + assert_eq!(result.len(), 4); + // Note that this constraint could be tighter (0xcd), but the solver + // will get to this by intersecting the result with the input + // constraints. + assert_eq!(result[0], mask(0xff)); + // Same here + assert_eq!(result[1], mask(0xff)); + // We won't be able to compute the result, but we know that the range + // constraint of `x` also applies to `z`. + assert_eq!(result[2], mask(0xcd)); + assert_eq!(result[3], value(1)); + } + + #[test] + fn test_unknown_operation() { + let result = run(default(), default(), default(), default()); + + assert_eq!(result.len(), 4); + assert_eq!(result[0], mask(0xff)); + assert_eq!(result[1], mask(0xff)); + assert_eq!(result[2], mask(0xff)); + assert_eq!(result[3], mask(0x1)); + } +} diff --git a/openvm/src/bus_interaction_handler/memory.rs b/openvm/src/bus_interaction_handler/memory.rs new file mode 100644 index 000000000..145eb5ec0 --- /dev/null +++ b/openvm/src/bus_interaction_handler/memory.rs @@ -0,0 +1,99 @@ +use openvm_instructions::riscv::{RV32_MEMORY_AS, RV32_REGISTER_AS}; +use powdr::{FieldElement, LargeInt}; +use powdr_constraint_solver::range_constraint::RangeConstraint; + +use super::byte_constraint; + +pub fn handle_memory( + payload: &[RangeConstraint], + multiplicity: T, +) -> Vec> { + // See: https://github.com/openvm-org/openvm/blob/main/crates/vm/src/system/memory/offline_checker/bus.rs + // Expects (address_space, pointer, data, timestamp). + if payload.len() < 4 { + panic!("Expected at least 4 arguments"); + } + + let address_space = &payload[0]; + let pointer = &payload[1]; + let timestamp = &payload[payload.len() - 1]; + let data = &payload[2..payload.len() - 1]; + + let is_receive = if multiplicity == -T::one() { + true + } else if multiplicity == T::one() { + false + } else { + panic!("Expected multiplicity to be 1 or -1, got: {multiplicity}"); + }; + + let address_space_value = address_space + .try_to_single_value() + .map(|v| v.to_integer().try_into_u32().unwrap()); + + match (is_receive, address_space_value) { + // By the assumption that all data written to registers or memory are range-checked, + // we can return a byte range constraint for the data. 
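// Worked example (illustrative values): a 4-limb register word holding
// 0x12345678 appears on this bus as the limbs [0x78, 0x56, 0x34, 0x12],
// assuming the little-endian limb layout used for RV32 words. When the arm
// below matches, each of those data limbs is tightened to the byte range
// [0, 0xff]; the address-space, pointer and timestamp constraints are passed
// through unchanged.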
+ (false, Some(RV32_REGISTER_AS)) | (false, Some(RV32_MEMORY_AS)) => { + let data = data.iter().map(|_| byte_constraint()).collect::>(); + + vec![address_space.clone(), pointer.clone()] + .into_iter() + .chain(data) + .chain(std::iter::once(timestamp.clone())) + .collect() + } + // Otherwise, we can't improve the constraints + _ => payload.to_vec(), + } +} + +#[cfg(test)] +mod tests { + use crate::bus_interaction_handler::{test_utils::*, OpenVmBusInteractionHandler, MEMORY}; + + use super::*; + use powdr::number::BabyBearField; + use powdr_constraint_solver::constraint_system::{BusInteraction, BusInteractionHandler}; + + fn run( + address_space: RangeConstraint, + pointer: RangeConstraint, + data: Vec>, + timestamp: RangeConstraint, + multiplicity: u64, + ) -> Vec> { + let handler = OpenVmBusInteractionHandler::::default(); + + let bus_interaction = BusInteraction { + bus_id: RangeConstraint::from_value(MEMORY.into()), + multiplicity: value(multiplicity), + payload: std::iter::once(address_space) + .chain(std::iter::once(pointer)) + .chain(data) + .chain(std::iter::once(timestamp)) + .collect(), + }; + let result = handler.handle_bus_interaction(bus_interaction); + result.payload + } + + #[test] + fn test_read() { + let address_space = value(RV32_MEMORY_AS as u64); + let pointer = value(0x1234); + let data = vec![default(); 4]; + let timestamp = value(0x5678); + + let result = run(address_space, pointer, data, timestamp, 1); + + assert_eq!(result.len(), 7); + assert_eq!(result[0], value(RV32_MEMORY_AS as u64)); + assert_eq!(result[1], value(0x1234)); + assert_eq!(result[2], byte_constraint()); + assert_eq!(result[3], byte_constraint()); + assert_eq!(result[4], byte_constraint()); + assert_eq!(result[5], byte_constraint()); + assert_eq!(result[6], value(0x5678)); + } +} diff --git a/openvm/src/bus_interaction_handler/mod.rs b/openvm/src/bus_interaction_handler/mod.rs new file mode 100644 index 000000000..fe713a487 --- /dev/null +++ b/openvm/src/bus_interaction_handler/mod.rs @@ -0,0 +1,159 @@ +use bitwise_lookup::handle_bitwise_lookup; +use memory::handle_memory; +use powdr::{FieldElement, LargeInt}; +use powdr_autoprecompiles::optimizer::{ + ConcreteBusInteractionHandler, ConcreteBusInteractionResult, +}; +use powdr_constraint_solver::{ + constraint_system::{BusInteraction, BusInteractionHandler}, + range_constraint::RangeConstraint, +}; +use tuple_range_checker::handle_tuple_range_checker; +use variable_range_checker::handle_variable_range_checker; + +mod bitwise_lookup; +mod memory; +mod tuple_range_checker; +mod variable_range_checker; + +const EXECUTION_BRIDGE: u64 = 0; +const MEMORY: u64 = 1; +const PC_LOOKUP: u64 = 2; +const VARIABLE_RANGE_CHECKER: u64 = 3; +const BITWISE_LOOKUP: u64 = 6; +const TUPLE_RANGE_CHECKER: u64 = 7; + +pub enum BusType { + ExecutionBridge, + Memory, + PcLookup, + VariableRangeChecker, + BitwiseLookup, + TupleRangeChecker, +} + +pub fn bus_type(bus_id: u64) -> BusType { + match bus_id { + EXECUTION_BRIDGE => BusType::ExecutionBridge, + MEMORY => BusType::Memory, + PC_LOOKUP => BusType::PcLookup, + VARIABLE_RANGE_CHECKER => BusType::VariableRangeChecker, + BITWISE_LOOKUP => BusType::BitwiseLookup, + TUPLE_RANGE_CHECKER => BusType::TupleRangeChecker, + _ => panic!("Unknown bus ID: {bus_id}"), + } +} + +#[derive(Default, Clone)] +pub struct OpenVmBusInteractionHandler { + _phantom: std::marker::PhantomData, +} + +impl BusInteractionHandler for OpenVmBusInteractionHandler { + fn handle_bus_interaction( + &self, + bus_interaction: BusInteraction>, + ) -> 
BusInteraction> { + let (Some(bus_id), Some(multiplicity)) = ( + bus_interaction.bus_id.try_to_single_value(), + bus_interaction.multiplicity.try_to_single_value(), + ) else { + return bus_interaction; + }; + + if multiplicity.is_zero() { + return bus_interaction; + } + + let payload_constraints = match bus_type(bus_id.to_integer().try_into_u64().unwrap()) { + // Sends / receives (pc, timestamp) pairs. They could have any value. + BusType::ExecutionBridge => bus_interaction.payload, + // Sends a (pc, opcode, args..) tuple. In theory, we could refine the range constraints + // of the args here, but for auto-precompiles, only the PC will be unknown, which could + // have any value. + BusType::PcLookup => bus_interaction.payload, + BusType::BitwiseLookup => handle_bitwise_lookup(&bus_interaction.payload), + BusType::Memory => handle_memory(&bus_interaction.payload, multiplicity), + BusType::VariableRangeChecker => { + handle_variable_range_checker(&bus_interaction.payload) + } + BusType::TupleRangeChecker => handle_tuple_range_checker(&bus_interaction.payload), + }; + BusInteraction { + payload: payload_constraints, + ..bus_interaction + } + } +} + +fn byte_constraint() -> RangeConstraint { + RangeConstraint::from_mask(0xffu64) +} + +impl ConcreteBusInteractionHandler for OpenVmBusInteractionHandler { + fn handle_concrete_bus_interaction( + &self, + bus_interaction: BusInteraction, + ) -> ConcreteBusInteractionResult { + // If multiplicity is zero, can remove without inspecting + if bus_interaction.multiplicity.is_zero() { + return ConcreteBusInteractionResult::AlwaysSatisfied; + } + + match bus_type(bus_interaction.bus_id.to_integer().try_into_u64().unwrap()) { + BusType::ExecutionBridge => { + // Execution bridge could have any value. + ConcreteBusInteractionResult::HasSideEffects + } + BusType::PcLookup => { + // For auto-precompiles, the PC will be unknown, which could have any value. + unreachable!("PC can't be known at compile time, so shouldn't become a bus interaction with concrete values!") + } + BusType::Memory => { + // Memory read/write will always have side effects + // so we can't remove the bus interaction without changing the statement being proven. + ConcreteBusInteractionResult::HasSideEffects + } + BusType::BitwiseLookup | BusType::VariableRangeChecker | BusType::TupleRangeChecker => { + // Fixed lookups can always be satisfied unless the bus rules are violated. + // This can be checked via BusInteractionHandler::handle_bus_interaction_checked. 
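// As a standalone illustration of that check (plain integers instead of field
// elements; the function name is illustrative and not used by the handler): a
// bitwise-lookup row with fully concrete values violates the bus rules exactly
// when an operand is not a byte, the declared result is wrong, or the selector
// is neither 0 nor 1.
fn sketch_violates_bitwise_rules(x: u64, y: u64, z: u64, op: u64) -> bool {
    match op {
        0 => x > 0xff || y > 0xff || z != 0,
        1 => x > 0xff || y > 0xff || z != (x ^ y),
        _ => true,
    }
}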
+ let range_constraints = BusInteraction::from_iter( + bus_interaction + .iter() + .map(|v| RangeConstraint::from_value(*v)), + ); + if self + .handle_bus_interaction_checked(range_constraints) + .is_err() + { + ConcreteBusInteractionResult::ViolatesBusRules + } else { + ConcreteBusInteractionResult::AlwaysSatisfied + } + } + } + } +} + +#[cfg(test)] +mod test_utils { + + use super::*; + use powdr::number::BabyBearField; + + pub fn value(value: u64) -> RangeConstraint { + RangeConstraint::from_value(BabyBearField::from(value)) + } + + pub fn mask(mask: u64) -> RangeConstraint { + RangeConstraint::from_mask(mask) + } + + pub fn range(start: u64, end: u64) -> RangeConstraint { + RangeConstraint::from_range(BabyBearField::from(start), BabyBearField::from(end)) + } + + pub fn default() -> RangeConstraint { + RangeConstraint::default() + } +} diff --git a/openvm/src/bus_interaction_handler/tuple_range_checker.rs b/openvm/src/bus_interaction_handler/tuple_range_checker.rs new file mode 100644 index 000000000..ed4de0657 --- /dev/null +++ b/openvm/src/bus_interaction_handler/tuple_range_checker.rs @@ -0,0 +1,63 @@ +use powdr::FieldElement; +use powdr_constraint_solver::range_constraint::RangeConstraint; + +/// Maximum value of the first element, +/// see https://github.com/openvm-org/openvm/blob/main/extensions/rv32im/circuit/src/extension.rs#L124 +// TODO: This should be configurable +const MAX_0: u64 = (1u64 << 8) - 1; + +/// Maximum value of the second element, +/// see https://github.com/openvm-org/openvm/blob/main/extensions/rv32im/circuit/src/extension.rs#L124 +// TODO: This should be configurable +const MAX_1: u64 = (8 * (1 << 8)) - 1; + +pub fn handle_tuple_range_checker( + payload: &[RangeConstraint], +) -> Vec> { + // See: https://github.com/openvm-org/openvm/blob/v1.0.0/crates/circuits/primitives/src/range_tuple/bus.rs + // Expects (x, y), where `x` is in the range [0, MAX_0] and `y` is in the range [0, MAX_1] + let [_x, _y] = payload else { + panic!("Expected arguments (x, y)"); + }; + + vec![ + RangeConstraint::from_range(T::from(0u64), T::from(MAX_0)), + RangeConstraint::from_range(T::from(0u64), T::from(MAX_1)), + ] +} + +#[cfg(test)] +mod tests { + use crate::bus_interaction_handler::{ + test_utils::*, OpenVmBusInteractionHandler, TUPLE_RANGE_CHECKER, + }; + + use super::*; + use powdr::number::BabyBearField; + use powdr_constraint_solver::constraint_system::{BusInteraction, BusInteractionHandler}; + + fn run( + x: RangeConstraint, + y: RangeConstraint, + ) -> Vec> { + let handler = OpenVmBusInteractionHandler::::default(); + + let bus_interaction = BusInteraction { + bus_id: RangeConstraint::from_value(TUPLE_RANGE_CHECKER.into()), + multiplicity: value(1), + payload: vec![x, y], + }; + let result = handler.handle_bus_interaction(bus_interaction); + result.payload + } + + #[test] + fn test_unknown() { + let x = default(); + let y = default(); + let result = run(x, y); + assert_eq!(result.len(), 2); + assert_eq!(result[0], range(0, MAX_0)); + assert_eq!(result[1], range(0, MAX_1),); + } +} diff --git a/openvm/src/bus_interaction_handler/variable_range_checker.rs b/openvm/src/bus_interaction_handler/variable_range_checker.rs new file mode 100644 index 000000000..54a87d238 --- /dev/null +++ b/openvm/src/bus_interaction_handler/variable_range_checker.rs @@ -0,0 +1,79 @@ +use powdr::{FieldElement, LargeInt}; +use powdr_constraint_solver::range_constraint::RangeConstraint; + +/// The maximum number of bits that can be checked by the variable range checker. 
+// TODO: This should be configurable +const MAX_BITS: u64 = 25; + +pub fn handle_variable_range_checker( + payload: &[RangeConstraint], +) -> Vec> { + // See: https://github.com/openvm-org/openvm/blob/v1.0.0/crates/circuits/primitives/src/var_range/bus.rs + // Expects (x, bits), where `x` is in the range [0, 2^bits - 1] + let [_x, bits] = payload else { + panic!("Expected arguments (x, bits)"); + }; + match bits.try_to_single_value() { + Some(bits_value) => { + let bits_value = bits_value.to_integer().try_into_u64().unwrap(); + assert!(bits_value <= MAX_BITS); + let mask = (1u64 << bits_value) - 1; + vec![RangeConstraint::from_mask(mask), bits.clone()] + } + None => { + vec![ + RangeConstraint::from_mask((1u64 << MAX_BITS) - 1), + RangeConstraint::from_range(T::from(0), T::from(MAX_BITS)), + ] + } + } +} + +#[cfg(test)] +mod tests { + use crate::bus_interaction_handler::{ + test_utils::*, OpenVmBusInteractionHandler, VARIABLE_RANGE_CHECKER, + }; + + use super::*; + use powdr::number::BabyBearField; + use powdr_constraint_solver::constraint_system::{BusInteraction, BusInteractionHandler}; + + fn run( + x: RangeConstraint, + bits: RangeConstraint, + ) -> Vec> { + let handler = OpenVmBusInteractionHandler::::default(); + + let bus_interaction = BusInteraction { + bus_id: RangeConstraint::from_value(VARIABLE_RANGE_CHECKER.into()), + multiplicity: value(1), + payload: vec![x, bits], + }; + let result = handler.handle_bus_interaction(bus_interaction); + result.payload + } + + #[test] + fn test_unknown_bits() { + let x = default(); + let bits = default(); + let result = run(x, bits); + assert_eq!(result.len(), 2); + assert_eq!( + result[0], + RangeConstraint::from_mask((1u64 << MAX_BITS) - 1) + ); + assert_eq!(result[1], range(0, MAX_BITS)); + } + + #[test] + fn test_known_bits() { + let x = default(); + let bits = value(12); + let result = run(x, bits); + assert_eq!(result.len(), 2); + assert_eq!(result[0], mask(0xfff)); + assert_eq!(result[1], value(12)); + } +} diff --git a/openvm/src/customize_exe.rs b/openvm/src/customize_exe.rs new file mode 100644 index 000000000..e5b732dbc --- /dev/null +++ b/openvm/src/customize_exe.rs @@ -0,0 +1,513 @@ +use std::collections::{BTreeMap, BTreeSet, HashMap}; + +use itertools::Itertools; +use openvm_algebra_transpiler::{Fp2Opcode, Rv32ModularArithmeticOpcode}; +use openvm_ecc_transpiler::Rv32WeierstrassOpcode; +use openvm_instructions::LocalOpcode; +use openvm_instructions::{exe::VmExe, instruction::Instruction, program::Program, VmOpcode}; +use openvm_keccak256_transpiler::Rv32KeccakOpcode; +use openvm_rv32im_transpiler::{Rv32HintStoreOpcode, Rv32LoadStoreOpcode}; +use openvm_sdk::config::SdkVmConfig; +use openvm_sha256_transpiler::Rv32Sha256Opcode; +use openvm_stark_backend::{interaction::SymbolicInteraction, p3_field::PrimeField32}; +use powdr::ast::analyzed::AlgebraicExpression; +use powdr::{FieldElement, LargeInt}; +use powdr_autoprecompiles::powdr::UniqueColumns; +use powdr_autoprecompiles::{ + Autoprecompiles, BusInteractionKind, InstructionKind, SymbolicBusInteraction, + SymbolicConstraint, SymbolicInstructionStatement, SymbolicMachine, +}; + +use crate::bus_interaction_handler::OpenVmBusInteractionHandler; +use crate::instruction_formatter::openvm_instruction_formatter; +use crate::{ + powdr_extension::{OriginalInstruction, PowdrExtension, PowdrOpcode, PowdrPrecompile}, + utils::symbolic_to_algebraic, +}; + +const OPENVM_DEGREE_BOUND: usize = 5; + +const POWDR_OPCODE: usize = 0x10ff; + +pub fn customize( + mut exe: VmExe, + base_config: 
SdkVmConfig, + labels: &BTreeSet, + airs: &BTreeMap>, + autoprecompiles: usize, + skip: usize, + pc_idx_count: Option>, +) -> (VmExe, PowdrExtension) { + // The following opcodes shall never be accelerated and therefore always put in its own basic block. + // Currently this contains OpenVm opcodes: Rv32HintStoreOpcode::HINT_STOREW (0x260) and Rv32HintStoreOpcode::HINT_BUFFER (0x261) + // which are the only two opcodes from the Rv32HintStore, the air responsible for reading host states via stdin. + // We don't want these opcodes because they create air constraints with next references, which powdr-openvm does not support yet. + let opcodes_no_apc = vec![ + Rv32HintStoreOpcode::HINT_STOREW.global_opcode().as_usize(), + Rv32HintStoreOpcode::HINT_BUFFER.global_opcode().as_usize(), + Rv32LoadStoreOpcode::LOADB.global_opcode().as_usize(), + Rv32LoadStoreOpcode::LOADH.global_opcode().as_usize(), + Rv32WeierstrassOpcode::EC_ADD_NE.global_opcode().as_usize(), + Rv32WeierstrassOpcode::SETUP_EC_ADD_NE + .global_opcode() + .as_usize(), + Rv32WeierstrassOpcode::EC_DOUBLE.global_opcode().as_usize(), + Rv32WeierstrassOpcode::SETUP_EC_DOUBLE + .global_opcode() + .as_usize(), + Rv32WeierstrassOpcode::EC_ADD_NE.global_opcode().as_usize() + 4, + Rv32WeierstrassOpcode::SETUP_EC_ADD_NE + .global_opcode() + .as_usize() + + 4, + Rv32WeierstrassOpcode::EC_DOUBLE.global_opcode().as_usize() + 4, + Rv32WeierstrassOpcode::SETUP_EC_DOUBLE + .global_opcode() + .as_usize() + + 4, + Rv32KeccakOpcode::KECCAK256.global_opcode().as_usize(), + Rv32Sha256Opcode::SHA256.global_opcode().as_usize(), + Rv32ModularArithmeticOpcode::ADD.global_opcode().as_usize(), + Rv32ModularArithmeticOpcode::SUB.global_opcode().as_usize(), + Rv32ModularArithmeticOpcode::SETUP_ADDSUB + .global_opcode() + .as_usize(), + Rv32ModularArithmeticOpcode::MUL.global_opcode().as_usize(), + Rv32ModularArithmeticOpcode::DIV.global_opcode().as_usize(), + Rv32ModularArithmeticOpcode::SETUP_MULDIV + .global_opcode() + .as_usize(), + Rv32ModularArithmeticOpcode::IS_EQ + .global_opcode() + .as_usize(), + Rv32ModularArithmeticOpcode::SETUP_ISEQ + .global_opcode() + .as_usize(), + Fp2Opcode::ADD.global_opcode().as_usize(), + Fp2Opcode::SUB.global_opcode().as_usize(), + Fp2Opcode::SETUP_ADDSUB.global_opcode().as_usize(), + Fp2Opcode::MUL.global_opcode().as_usize(), + Fp2Opcode::DIV.global_opcode().as_usize(), + Fp2Opcode::SETUP_MULDIV.global_opcode().as_usize(), + 0x510, // not sure yet what this is + 0x513, // not sure yet what this is + 0x51c, // not sure yet what this is + 0x523, // not sure yet what this is + ]; + + let mut blocks = collect_basic_blocks(&exe.program, labels, &opcodes_no_apc); + tracing::info!("Got {} basic blocks", blocks.len()); + + if let Some(pgo_program_idx_count) = pc_idx_count { + // sort the blocks by block_len * frequency (the count of start_idx in pgo_program_idx_count) + blocks.sort_by(|a, b| { + // not all start index of a basic block can be found in pc_idx_count, because a basic block might not be executed at all + // in this case, they will just default to 0 + let a_count = pgo_program_idx_count + .get(&(a.start_idx as u32)) + .unwrap_or(&0); + let b_count = pgo_program_idx_count + .get(&(b.start_idx as u32)) + .unwrap_or(&0); + + let a_opcode_no_apc = if !a.statements.is_empty() { + opcodes_no_apc.contains(&a.statements[0].opcode.as_usize()) + } else { + true + }; + let b_opcode_no_apc = if !b.statements.is_empty() { + opcodes_no_apc.contains(&b.statements[0].opcode.as_usize()) + } else { + true + }; + + // if a basic block starts 
with an opcode that is in opcodes_no_apc, put it at the bottom of the list of blocks to order + // otherwise, order by descending cost = instruction count * execution frequency + match (a_opcode_no_apc, b_opcode_no_apc) { + (true, false) => std::cmp::Ordering::Greater, + (false, true) => std::cmp::Ordering::Less, + _ => (b_count * (b.statements.len() as u32)) + .cmp(&(a_count * (a.statements.len() as u32))), + } + }); + + // print block start_idx, cost = block_len * frequency, block_len, and frequency, sorted by descending cost + for block in &blocks { + let start_idx = block.start_idx; + let block_len = block.statements.len(); + let count = pgo_program_idx_count.get(&(start_idx as u32)).unwrap_or(&0); + let cost = count * (block_len as u32); + tracing::info!( + "Basic block start_idx: {}, cost: {}, block_len: {}, frequency: {}", + start_idx, + cost, + block_len, + count + ); + } + } else { + // if pgo option is not set, sort by descending order of block length + blocks.sort_by(|a, b| (b.statements.len()).cmp(&a.statements.len())); + } + + let program = &mut exe.program.instructions_and_debug_infos; + + let noop = Instruction { + opcode: VmOpcode::from_usize(0xdeadaf), + a: F::ZERO, + b: F::ZERO, + c: F::ZERO, + d: F::ZERO, + e: F::ZERO, + f: F::ZERO, + g: F::ZERO, + }; + + let mut extensions = Vec::new(); + let n_acc = autoprecompiles; + let n_skip = skip; + tracing::info!("Generating {n_acc} autoprecompiles"); + + for (i, acc_block) in blocks.iter().skip(n_skip).take(n_acc).enumerate() { + tracing::debug!( + "Accelerating block {i} of length {} and start idx {}", + acc_block.statements.len(), + acc_block.start_idx + ); + + tracing::debug!( + "Acc block: {}", + acc_block.pretty_print(openvm_instruction_formatter) + ); + + let apc_opcode = POWDR_OPCODE + i; + let new_instr = Instruction { + opcode: VmOpcode::from_usize(apc_opcode), + a: F::ZERO, + b: F::ZERO, + c: F::ZERO, + d: F::ZERO, + e: F::ZERO, + f: F::ZERO, + g: F::ZERO, + }; + + let pc = acc_block.start_idx as usize; + let n_acc = acc_block.statements.len(); + let (acc, new_instrs): (Vec<_>, Vec<_>) = program[pc..pc + n_acc] + .iter() + .enumerate() + .map(|(i, x)| { + let instr = x.as_ref().unwrap(); + let instr = instr.0.clone(); + if i == 0 { + (instr, new_instr.clone()) + } else { + (instr, noop.clone()) + } + }) + .collect(); + + let new_instrs = new_instrs.into_iter().map(|x| Some((x, None))); + + let len_before = program.len(); + program.splice(pc..pc + n_acc, new_instrs); + assert_eq!(program.len(), len_before); + + let (autoprecompile, subs) = + generate_autoprecompile::(acc_block, airs, apc_opcode); + + let is_valid_column = autoprecompile + .unique_columns() + .find(|c| c.name == "is_valid") + .unwrap(); + + let opcodes_in_acc = acc + .iter() + .map(|x| x.opcode.as_usize()) + .unique() + .collect_vec(); + + extensions.push(PowdrPrecompile::new( + format!("PowdrAutoprecompile_{i}"), + PowdrOpcode { + class_offset: apc_opcode, + }, + transpose_symbolic_machine(autoprecompile), + acc.into_iter() + .zip_eq(subs) + .map(|(instruction, subs)| OriginalInstruction::new(instruction, subs)) + .collect(), + airs.iter() + .filter(|(i, _)| opcodes_in_acc.contains(*i)) + .map(|(i, air)| (*i, transpose_symbolic_machine(air.clone()))) + .collect(), + is_valid_column, + )); + } + + (exe, PowdrExtension::new(extensions, base_config)) +} + +// TODO collect properly from opcode enums +const BRANCH_OPCODES: [usize; 9] = [ + 0x220, 0x221, 0x225, 0x226, 0x227, 0x228, 0x230, 0x231, 0x235, +]; +pub fn is_jump(instruction: &VmOpcode) -> bool { + 
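// A minimal sketch of the program rewrite performed above (opcodes and the
// no-op value are illustrative stand-ins): an accelerated block of n
// instructions is replaced in place by the new APC instruction followed by
// n - 1 no-ops, so the program length and every pc outside the block are
// preserved.
fn sketch_splice_block(program: &mut Vec<u32>, start: usize, len: usize, apc_opcode: u32) {
    const NOOP: u32 = u32::MAX; // stand-in for the `noop` instruction above
    let len_before = program.len();
    let replacement =
        std::iter::once(apc_opcode).chain(std::iter::repeat(NOOP).take(len - 1));
    program.splice(start..start + len, replacement);
    // one-for-one replacement: all other instruction indices stay valid
    assert_eq!(program.len(), len_before);
}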
let opcode = instruction.as_usize(); + BRANCH_OPCODES.contains(&opcode) +} + +#[derive(Debug, Clone)] +pub struct BasicBlock { + pub start_idx: u64, + pub statements: Vec>, +} + +impl BasicBlock { + fn pretty_print(&self, instr_formatter: impl Fn(&Instruction) -> String) -> String { + format!("BasicBlock(start_idx: {}, statements: [\n", self.start_idx) + + &self + .statements + .iter() + .enumerate() + .map(|(i, instr)| format!(" instr {i:>3}: {}", instr_formatter(instr))) + .collect::>() + .join("\n") + + "\n])" + } +} + +pub fn collect_basic_blocks( + program: &Program, + labels: &BTreeSet, + opcodes_no_apc: &[usize], +) -> Vec> { + let mut blocks = Vec::new(); + let mut curr_block = BasicBlock { + start_idx: 0, + statements: Vec::new(), + }; + let init_pc = 0x0020_0800; + for (i, instr) in program.instructions_and_debug_infos.iter().enumerate() { + let instr = instr.as_ref().unwrap().0.clone(); + let adjusted_pc = init_pc + (i as u32) * 4; + let is_target = labels.contains(&adjusted_pc); + let is_branch = is_jump(&instr.opcode); + + // If this opcode cannot be in an apc, we make sure it's alone in a BB. + if opcodes_no_apc.contains(&instr.opcode.as_usize()) { + // Push the current block and start a new one from this instruction. + blocks.push(curr_block); + curr_block = BasicBlock { + start_idx: i as u64, + statements: Vec::new(), + }; + // Add the instruction and push the block + curr_block.statements.push(instr.clone()); + blocks.push(curr_block); + // Start a new block from the next instruction. + curr_block = BasicBlock { + start_idx: (i + 1) as u64, + statements: Vec::new(), + }; + } else { + // If the instruction is a target, we need to close the previous block + // as is and start a new block from this instruction. + if is_target { + blocks.push(curr_block); + curr_block = BasicBlock { + start_idx: i as u64, + statements: Vec::new(), + }; + } + curr_block.statements.push(instr.clone()); + // If the instruction is a branch, we need to close this block + // with this instruction and start a new block from the next one. + if is_branch { + blocks.push(curr_block); + curr_block = BasicBlock { + start_idx: (i + 1) as u64, + statements: Vec::new(), + }; + } + } + } + + if !curr_block.statements.is_empty() { + blocks.push(curr_block); + } + + blocks +} + +// OpenVM relevant bus ids: +// 0: execution bridge -> [pc, timestamp] +// 1: memory -> [address space, pointer, data, timestamp, 1] +// 2: pc lookup -> [...] +// 3: range tuple -> [col, bits] +// 5: bitwise xor -> +// [a, b, 0, 0] byte range checks for a and b +// [a, b, c, 1] c = xor(a, b) +fn generate_autoprecompile( + block: &BasicBlock, + airs: &BTreeMap>, + apc_opcode: usize, +) -> (SymbolicMachine
, Vec>) { + tracing::debug!( + "Generating autoprecompile for block at index {}", + block.start_idx + ); + let mut instruction_kind = BTreeMap::new(); + let mut instruction_machines = BTreeMap::new(); + let program = block + .statements + .iter() + .map(|instr| { + let instr_name = format!("{}", instr.opcode); + + let symb_machine = airs.get(&instr.opcode.as_usize()).unwrap(); + + let symb_instr = SymbolicInstructionStatement { + name: instr_name.clone(), + opcode: instr.opcode.as_usize(), + args: [ + instr.a, instr.b, instr.c, instr.d, instr.e, instr.f, instr.g, + ] + .iter() + .map(|f| to_powdr_field::(*f)) + .collect(), + }; + + if is_jump(&instr.opcode) { + instruction_kind.insert(instr_name.clone(), InstructionKind::ConditionalBranch); + } else { + instruction_kind.insert(instr_name.clone(), InstructionKind::Normal); + }; + + instruction_machines.insert(instr_name.clone(), symb_machine.clone()); + + symb_instr + }) + .collect(); + + let autoprecompiles = Autoprecompiles { + program, + instruction_kind, + instruction_machines, + }; + + let (precompile, subs) = autoprecompiles.build( + OpenVmBusInteractionHandler::default(), + OPENVM_DEGREE_BOUND, + apc_opcode as u32, + ); + + // Check that substitution values are unique over all instructions + assert!(subs.iter().flatten().all_unique()); + + tracing::debug!( + "Done generating autoprecompile for block at index {}", + block.start_idx + ); + + (precompile, subs) +} + +pub fn openvm_bus_interaction_to_powdr( + interaction: &SymbolicInteraction, + columns: &[String], +) -> SymbolicBusInteraction
{ + // TODO + let kind = BusInteractionKind::Send; + // let kind = match interaction.interaction_type { + // InteractionType::Send => BusInteractionKind::Send, + // InteractionType::Receive => BusInteractionKind::Receive, + // }; + let id = interaction.bus_index as u64; + + let mult = symbolic_to_algebraic(&interaction.count, columns); + let args = interaction + .message + .iter() + .map(|e| symbolic_to_algebraic(e, columns)) + .collect(); + + SymbolicBusInteraction { + kind, + id, + mult, + args, + } +} + +fn to_powdr_field(f: F) -> P { + f.as_canonical_u32().into() +} + +fn to_ovm_field(f: P) -> F { + F::from_canonical_u32(f.to_integer().try_into_u32().unwrap()) +} + +// Transpose an algebraic expression from the powdr field to openvm field +fn transpose_algebraic_expression( + expr: AlgebraicExpression
, +) -> AlgebraicExpression { + match expr { + AlgebraicExpression::Number(n) => AlgebraicExpression::Number(to_ovm_field(n)), + AlgebraicExpression::Reference(reference) => AlgebraicExpression::Reference(reference), + AlgebraicExpression::PublicReference(reference) => { + AlgebraicExpression::PublicReference(reference) + } + AlgebraicExpression::Challenge(challenge) => AlgebraicExpression::Challenge(challenge), + AlgebraicExpression::BinaryOperation(algebraic_binary_operation) => { + let left = transpose_algebraic_expression(*algebraic_binary_operation.left); + let right = transpose_algebraic_expression(*algebraic_binary_operation.right); + AlgebraicExpression::BinaryOperation(powdr::ast::analyzed::AlgebraicBinaryOperation { + left: Box::new(left), + right: Box::new(right), + op: algebraic_binary_operation.op, + }) + } + AlgebraicExpression::UnaryOperation(algebraic_unary_operation) => { + AlgebraicExpression::UnaryOperation(powdr::ast::analyzed::AlgebraicUnaryOperation { + op: algebraic_unary_operation.op, + expr: Box::new(transpose_algebraic_expression( + *algebraic_unary_operation.expr, + )), + }) + } + } +} + +// Transpose a symbolic machine from the powdr field to openvm field +fn transpose_symbolic_machine( + machine: SymbolicMachine
, +) -> SymbolicMachine { + let constraints = machine + .constraints + .into_iter() + .map(|constraint| SymbolicConstraint { + expr: transpose_algebraic_expression(constraint.expr), + }) + .collect(); + let bus_interactions = machine + .bus_interactions + .into_iter() + .map(|interaction| SymbolicBusInteraction { + kind: interaction.kind, + id: interaction.id, + mult: transpose_algebraic_expression(interaction.mult.clone()), + args: interaction + .args + .iter() + .map(|arg| transpose_algebraic_expression(arg.clone())) + .collect(), + }) + .collect(); + + SymbolicMachine { + constraints, + bus_interactions, + } +} diff --git a/openvm/src/instruction_formatter.rs b/openvm/src/instruction_formatter.rs new file mode 100644 index 000000000..64100d042 --- /dev/null +++ b/openvm/src/instruction_formatter.rs @@ -0,0 +1,91 @@ +use openvm_instructions::instruction::Instruction; +use openvm_stark_backend::p3_field::PrimeField32; + +pub fn openvm_instruction_formatter(instruction: &Instruction) -> String { + let Instruction { + opcode, + a, + b, + c, + d, + e, + f, + g, + } = instruction; + + // Opcodes taken from: + // https://github.com/openvm-org/openvm/blob/v1.0.0/extensions/rv32im/transpiler/src/instructions.rs + match opcode.as_usize() { + // Alu instructions, see: + // https://github.com/openvm-org/openvm/blob/v1.0.0/extensions/rv32im/circuit/src/adapters/alu.rs#L197-L201 + 512..=521 => { + assert_eq!(d, &F::ONE); + assert_eq!(f, &F::ZERO); + assert_eq!(g, &F::ZERO); + let opcode = match opcode.as_usize() { + // Rv32BaseAluChip + 512 => "ADD", + 513 => "SUB", + 514 => "XOR", + 515 => "OR", + 516 => "AND", + // Rv32ShiftChip + 517 => "SLL", + 518 => "SRL", + 519 => "SRA", + // Rv32LessThanChip + 520 => "SLT", + 521 => "SLTU", + _ => unreachable!(), + }; + format!("{opcode} rd_ptr = {a}, rs1_ptr = {b}, rs2 = {c}, rs2_as = {e}") + } + + // Load/Store instructions, see: + // https://github.com/openvm-org/openvm/blob/v1.0.0/extensions/rv32im/circuit/src/adapters/loadstore.rs#L340-L346 + 528..=535 => { + assert_eq!(d, &F::ONE); + let opcode = match opcode.as_usize() { + 528 => "LOADW", + 529 => "LOADBU", + 530 => "LOADHU", + 531 => "STOREW", + 532 => "STOREH", + 533 => "STOREB", + 534 => "LOADB", + 535 => "LOADH", + _ => unreachable!(), + }; + format!("{opcode} rd_rs2_ptr = {a}, rs1_ptr = {b}, imm = {c}, mem_as = {e}, needs_write = {f}, imm_sign = {g}") + } + + 544 => format!("BEQ {a} {b} {c} {d} {e}"), + 545 => format!("BNE {a} {b} {c} {d} {e}"), + + 549 => format!("BLT {a} {b} {c} {d} {e}"), + 550 => format!("BLTU {a} {b} {c} {d} {e}"), + 551 => format!("BGE {a} {b} {c} {d} {e}"), + 552 => format!("BGEU {a} {b} {c} {d} {e}"), + + 560 => format!("JAL {a} {b} {c} {d} {e}"), + 561 => format!("LUI {a} {b} {c} {d} {e}"), + + 565 => format!("JALR {a} {b} {c} {d} {e}"), + + 576 => format!("AUIPC {a} {b} {c} {d} {e}"), + + 592 => format!("MUL {a} {b} {c} {d} {e}"), + 593 => format!("MULH {a} {b} {c} {d} {e}"), + 594 => format!("MULHSU {a} {b} {c} {d} {e}"), + 595 => format!("MULHU {a} {b} {c} {d} {e}"), + + 596 => format!("DIV {a} {b} {c} {d} {e}"), + 597 => format!("DIVU {a} {b} {c} {d} {e}"), + 598 => format!("REM {a} {b} {c} {d} {e}"), + 599 => format!("REMU {a} {b} {c} {d} {e}"), + + 608 => format!("HINT_STOREW {a} {b} {c} {d} {e}"), + 609 => format!("HINT_BUFFER {a} {b} {c} {d} {e}"), + _ => format!(" {a} {b} {c} {d} {e} {f} {g}"), + } +} diff --git a/openvm/src/lib.rs b/openvm/src/lib.rs new file mode 100644 index 000000000..b1ba215aa --- /dev/null +++ b/openvm/src/lib.rs @@ -0,0 +1,777 @@ +use 
eyre::Result; +use itertools::{multiunzip, Itertools}; +use openvm_build::{ + build_guest_package, find_unique_executable, get_package, GuestOptions, TargetFilter, +}; +use openvm_circuit::arch::{ + instructions::exe::VmExe, Streams, SystemConfig, VirtualMachine, VmChipComplex, VmConfig, + VmInventoryError, +}; +use openvm_native_recursion::halo2::utils::CacheHalo2ParamsReader; +use openvm_stark_backend::{ + air_builders::symbolic::SymbolicConstraints, engine::StarkEngine, rap::AnyRap, +}; +use openvm_stark_sdk::{config::fri_params::SecurityParameters, engine::StarkFriEngine}; +use powdr::FieldElement; +use powdr_autoprecompiles::SymbolicMachine; +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::{Arc, Mutex}, +}; +use utils::get_pil; + +use crate::customize_exe::openvm_bus_interaction_to_powdr; +use crate::utils::symbolic_to_algebraic; +use openvm_circuit_primitives_derive::ChipUsageGetter; +use openvm_sdk::{ + config::{AggConfig, AppConfig, SdkVmConfig, SdkVmConfigExecutor, SdkVmConfigPeriphery}, + DefaultStaticVerifierPvHandler, Sdk, StdIn, +}; +use openvm_stark_backend::{config::StarkGenericConfig, Chip}; +use openvm_stark_sdk::config::{ + baby_bear_poseidon2::{config_from_perm, default_perm, BabyBearPoseidon2Engine}, + FriParameters, +}; +use openvm_stark_sdk::{ + config::baby_bear_poseidon2::BabyBearPoseidon2Config, + openvm_stark_backend::p3_field::{Field, PrimeField32}, + p3_baby_bear::BabyBear, +}; +use powdr_extension::{PowdrExecutor, PowdrExtension, PowdrPeriphery}; +use serde::{Deserialize, Serialize}; +use std::collections::BTreeMap; + +mod air_builder; +use air_builder::AirKeygenBuilder; +use derive_more::From; +use openvm_circuit::{ + circuit_derive::Chip, + derive::{AnyEnum, InstructionExecutor as InstructionExecutorDerive}, +}; +mod utils; + +use tracing::dispatcher::Dispatch; +use tracing::field::Field as TracingField; +use tracing::{Event, Subscriber}; +use tracing_subscriber::{ + layer::Context, + prelude::*, + registry::{LookupSpan, Registry}, + Layer, +}; + +type SC = BabyBearPoseidon2Config; +pub type F = BabyBear; + +/// We do not use the transpiler, instead we customize an already transpiled program +mod customize_exe; + +// A module for our extension +mod powdr_extension; + +mod bus_interaction_handler; +mod instruction_formatter; + +#[allow(dead_code)] +mod plonk; + +/// A custom VmConfig that wraps the SdkVmConfig, adding our custom extension. 
+#[derive(Serialize, Deserialize, Clone)] +#[serde(bound = "F: Field")] +pub struct SpecializedConfig { + sdk_config: SdkVmConfig, + powdr: PowdrExtension, +} + +#[allow(clippy::large_enum_variant)] +#[derive(ChipUsageGetter, Chip, InstructionExecutorDerive, From, AnyEnum)] +pub enum SpecializedExecutor { + #[any_enum] + SdkExecutor(SdkVmConfigExecutor), + #[any_enum] + PowdrExecutor(PowdrExecutor), +} + +#[derive(From, ChipUsageGetter, Chip, AnyEnum)] +pub enum MyPeriphery { + #[any_enum] + SdkPeriphery(SdkVmConfigPeriphery), + #[any_enum] + PowdrPeriphery(PowdrPeriphery), +} + +impl VmConfig for SpecializedConfig { + type Executor = SpecializedExecutor; + type Periphery = MyPeriphery; + + fn system(&self) -> &SystemConfig { + VmConfig::::system(&self.sdk_config) + } + + fn system_mut(&mut self) -> &mut SystemConfig { + VmConfig::::system_mut(&mut self.sdk_config) + } + + fn create_chip_complex( + &self, + ) -> Result, VmInventoryError> { + let chip = self.sdk_config.create_chip_complex()?; + let chip = chip.extend(&self.powdr)?; + + Ok(chip) + } +} + +impl SpecializedConfig { + fn from_base_and_extension(sdk_config: SdkVmConfig, powdr: PowdrExtension) -> Self { + Self { sdk_config, powdr } + } +} + +pub fn build_elf_path>( + guest_opts: GuestOptions, + pkg_dir: P, + target_filter: &Option, +) -> Result { + let pkg = get_package(pkg_dir.as_ref()); + let target_dir = match build_guest_package(&pkg, &guest_opts, None, target_filter) { + Ok(target_dir) => target_dir, + Err(Some(code)) => { + return Err(eyre::eyre!("Failed to build guest: code = {}", code)); + } + Err(None) => { + return Err(eyre::eyre!( + "Failed to build guest (OPENVM_SKIP_BUILD is set)" + )); + } + }; + + find_unique_executable(pkg_dir, target_dir, target_filter) +} + +// compile the original openvm program without powdr extension +pub fn compile_openvm( + guest: &str, +) -> Result, Box> { + // wrap the sdk config (with the standard extensions) in our custom config (with our custom extension) + let sdk_vm_config = SdkVmConfig::builder() + .system(Default::default()) + .rv32i(Default::default()) + .rv32m(Default::default()) + .io(Default::default()) + .keccak(Default::default()) + .build(); + + let sdk = Sdk::default(); + + // Build the ELF with guest options and a target filter. + // We need these extra Rust flags to get the labels. + let guest_opts = GuestOptions::default(); + let guest_opts = guest_opts.with_rustc_flags(vec!["-C", "link-arg=--emit-relocs"]); + + // Point to our local guest + use std::path::PathBuf; + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).to_path_buf(); + path.push(guest); + let target_path = path.to_str().unwrap(); + + let elf = sdk.build(guest_opts, target_path, &Default::default())?; + + // Transpile the ELF into a VmExe. Note that this happens using the sdk transpiler only, our extension does not use a transpiler. + let exe = sdk.transpile(elf, sdk_vm_config.transpiler())?; + + Ok(OriginalCompiledProgram { exe, sdk_vm_config }) +} + +pub fn compile_guest( + guest: &str, + autoprecompiles: usize, + skip: usize, + pgo_data: Option>, +) -> Result, Box> { + let OriginalCompiledProgram { exe, sdk_vm_config } = compile_openvm(guest)?; + compile_exe(guest, exe, sdk_vm_config, autoprecompiles, skip, pgo_data) +} + +pub fn compile_exe( + guest: &str, + exe: VmExe, + sdk_vm_config: SdkVmConfig, + autoprecompiles: usize, + skip: usize, + pgo_data: Option>, +) -> Result, Box> { + // Build the ELF with guest options and a target filter. + // We need these extra Rust flags to get the labels. 
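// (`--emit-relocs` keeps relocation entries in the linked ELF; this is the
// extra information `load_elf` needs to recover the `text_labels` that mark
// basic-block boundaries, which `customize_exe::customize` consumes below.)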
+ let guest_opts = GuestOptions::default(); + let guest_opts = guest_opts.with_rustc_flags(vec!["-C", "link-arg=--emit-relocs"]); + + // Point to our local guest + use std::path::PathBuf; + let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).to_path_buf(); + path.push(guest); + let target_path = path.to_str().unwrap(); + + let elf_binary = build_elf_path(guest_opts.clone(), target_path, &Default::default())?; + let elf_powdr = powdr::riscv::elf::load_elf(&elf_binary); + + let airs = + instructions_to_airs::<_, powdr::number::BabyBearField>(exe.clone(), sdk_vm_config.clone()); + + let (exe, extension) = customize_exe::customize( + exe, + sdk_vm_config.clone(), + &elf_powdr.text_labels, + &airs, + autoprecompiles, + skip, + pgo_data, + ); + // Generate the custom config based on the generated instructions + let vm_config = SpecializedConfig::from_base_and_extension(sdk_vm_config, extension); + export_pil(vm_config.clone(), "debug.pil", 1000); + + Ok(CompiledProgram { exe, vm_config }) +} + +#[derive(Serialize, Deserialize, Clone)] +#[serde(bound = "F: Field")] +pub struct CompiledProgram { + pub exe: VmExe, + pub vm_config: SpecializedConfig, +} + +// the original openvm program and config without powdr extension +pub struct OriginalCompiledProgram { + pub exe: VmExe, + pub sdk_vm_config: SdkVmConfig, +} + +pub struct AirMetrics { + pub name: String, + pub width: usize, + pub constraints: usize, + pub bus_interactions: usize, +} + +impl CompiledProgram { + pub fn powdr_airs_metrics(&self) -> Vec { + let chip_complex: VmChipComplex<_, _, _> = self.vm_config.create_chip_complex().unwrap(); + + chip_complex + .inventory + .executors() + .iter() + .filter_map(|executor| { + let air = executor.air(); + let width = air.width(); + let name = air.name(); + + // We actually give name "powdr_air_for_opcode_" to the AIRs, + // but OpenVM uses the actual Rust type (PowdrAir) as the name in this method. + // TODO this is hacky but not sure how to do it better rn. 
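// Example use of this method (a sketch; the concrete numbers depend on the
// guest and on how many autoprecompiles were generated):
//
//     for m in program.powdr_airs_metrics() {
//         println!(
//             "{}: width={} constraints={} bus_interactions={}",
//             m.name, m.width, m.constraints, m.bus_interactions
//         );
//     }
//
// The tests at the bottom of this file assert on exactly these numbers.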
+ if name.starts_with("PowdrAir") { + let constraints = get_constraints(air); + Some(AirMetrics { + name: name.to_string(), + width, + constraints: constraints.constraints.len(), + bus_interactions: constraints.interactions.len(), + }) + } else { + None + } + }) + .collect() + } +} + +pub fn execute( + program: CompiledProgram, + inputs: StdIn, +) -> Result<(), Box> { + let CompiledProgram { exe, vm_config } = program; + + let sdk = Sdk::default(); + + let output = sdk.execute(exe.clone(), vm_config.clone(), inputs)?; + tracing::info!("Public values output: {:?}", output); + + Ok(()) +} + +pub fn pgo( + program: OriginalCompiledProgram, + inputs: StdIn, +) -> Result, Box> { + // in memory collector storage + let collected = Arc::new(Mutex::new(Vec::new())); + let collector_layer = PgoCollector { + pc: collected.clone(), + }; + + // build subscriber + let subscriber = Registry::default().with(collector_layer); + + // prepare for execute + let OriginalCompiledProgram { exe, sdk_vm_config } = program; + let sdk = Sdk::default(); + + // dispatch constructs a local subscriber at trace level that is invoked during pgo but doesn't override the global one at info level + let dispatch = Dispatch::new(subscriber); + tracing::dispatcher::with_default(&dispatch, || { + sdk.execute(exe.clone(), sdk_vm_config.clone(), inputs) + .unwrap(); + }); + + // collect the pc's during execution + let pc = collected.lock().unwrap().clone(); + + // create pc_index map to times executed, where pc_index = (pc - pc_base) / step + let pc_base = exe.program.pc_base; + let step = exe.program.step; + let pc_index_count = pc + .iter() + .fold(std::collections::HashMap::new(), |mut acc, pc| { + let pc_index = (*pc as u32 - pc_base) / step; + *acc.entry(pc_index).or_insert(0u32) += 1; + acc + }); + + // the smallest pc is the same as the base_pc if there's no stdin + let pc_min = pc.iter().min().unwrap(); + tracing::info!("pc_min: {}; pc_base: {}", pc_min, pc_base); + + // print the total and by pc counts at the warn level (default level in powdr-openvm) + tracing::warn!("Pgo captured {} pc's", pc.len()); + + // print pc_index map in descending order of pc_index count + let mut pc_index_count_sorted: Vec<_> = pc_index_count.iter().collect(); + pc_index_count_sorted.sort_by(|a, b| b.1.cmp(a.1)); + pc_index_count_sorted.iter().for_each(|(pc, count)| { + tracing::warn!("pc_index {}: {}", pc, count); + }); + + Ok(pc_index_count) +} + +pub fn prove( + program: &CompiledProgram, + mock: bool, + recursion: bool, + inputs: StdIn, +) -> Result<(), Box> { + let CompiledProgram { exe, vm_config } = program; + + let sdk = Sdk::default(); + + // Set app configuration + let app_log_blowup = 2; + let app_fri_params = FriParameters::standard_with_100_bits_conjectured_security(app_log_blowup); + let app_config = AppConfig::new(app_fri_params, vm_config.clone()); + + // Commit the exe + let app_committed_exe = sdk.commit_app_exe(app_fri_params, exe.clone())?; + + // Generate an AppProvingKey + let app_pk = Arc::new(sdk.app_keygen(app_config)?); + + if mock { + tracing::info!("Checking constraints and witness in Mock prover..."); + let engine = BabyBearPoseidon2Engine::new( + FriParameters::standard_with_100_bits_conjectured_security(app_log_blowup), + ); + let vm = VirtualMachine::new(engine, vm_config.clone()); + let pk = vm.keygen(); + let streams = Streams::from(inputs); + let mut result = vm.execute_and_generate(exe.clone(), streams).unwrap(); + let _final_memory = Option::take(&mut result.final_memory); + let global_airs = 
vm.config().create_chip_complex().unwrap().airs(); + for proof_input in &result.per_segment { + let (airs, pks, air_proof_inputs): (Vec<_>, Vec<_>, Vec<_>) = + multiunzip(proof_input.per_air.iter().map(|(air_id, air_proof_input)| { + ( + global_airs[*air_id].clone(), + pk.per_air[*air_id].clone(), + air_proof_input.clone(), + ) + })); + vm.engine.debug(&airs, &pks, &air_proof_inputs); + } + } else { + if !recursion { + // Generate a proof + tracing::info!("Generating proof..."); + let proof = + sdk.generate_app_proof(app_pk.clone(), app_committed_exe.clone(), inputs.clone())?; + tracing::info!("Proof generation done."); + + tracing::info!( + "Public values: {:?}", + proof.user_public_values.public_values + ); + + // Verify + let app_vk = app_pk.get_app_vk(); + sdk.verify_app_proof(&app_vk, &proof)?; + tracing::info!("Proof verification done."); + } else { + // Generate the aggregation proving key + const DEFAULT_PARAMS_DIR: &str = concat!(env!("HOME"), "/.openvm/params/"); + let halo2_params_reader = CacheHalo2ParamsReader::new(DEFAULT_PARAMS_DIR); + let agg_config = AggConfig::default(); + tracing::info!("Generating aggregation proving key..."); + let agg_pk = sdk.agg_keygen( + agg_config, + &halo2_params_reader, + &DefaultStaticVerifierPvHandler, + )?; + + tracing::info!("Generating SNARK verifier..."); + // Generate the SNARK verifier smart contract + let verifier = sdk.generate_halo2_verifier_solidity(&halo2_params_reader, &agg_pk)?; + + tracing::info!("Generating EVM proof..."); + // Generate an EVM proof + let proof = sdk.generate_evm_proof( + &halo2_params_reader, + app_pk, + app_committed_exe, + agg_pk, + inputs, + )?; + + tracing::info!("Verifying EVM proof..."); + // Verify the EVM proof + sdk.verify_evm_halo2_proof(&verifier, proof)?; + } + + tracing::info!("All done."); + } + + Ok(()) +} + +pub fn get_pc_idx_count(guest: &str, inputs: StdIn) -> HashMap { + let program = compile_openvm(guest).unwrap(); + // times executed by program index, where index = (pc - base_pc) / step + // help determine the basic blocks to create autoprecompile for + pgo(program, inputs).unwrap() +} + +pub fn instructions_to_airs, P: FieldElement>( + exe: VmExe, + vm_config: VC, +) -> BTreeMap> +where + VC::Executor: Chip, + VC::Periphery: Chip, +{ + let mut chip_complex: VmChipComplex<_, _, _> = vm_config.create_chip_complex().unwrap(); + exe.program + .instructions_and_debug_infos + .iter() + .map(|instr| instr.as_ref().unwrap().0.opcode) + .unique() + .filter_map(|op| { + chip_complex + .inventory + .get_mut_executor(&op) + .map(|executor| { + let air = executor.air(); + + let columns = get_columns(air.clone()); + + let constraints = get_constraints(air); + + let powdr_exprs = constraints + .constraints + .iter() + .map(|expr| symbolic_to_algebraic::(expr, &columns).into()) + .collect::>(); + + let powdr_bus_interactions = constraints + .interactions + .iter() + .map(|expr| openvm_bus_interaction_to_powdr(expr, &columns)) + .collect(); + + let symb_machine = SymbolicMachine { + constraints: powdr_exprs, + bus_interactions: powdr_bus_interactions, + }; + + (op.as_usize(), symb_machine) + }) + }) + .collect() +} + +pub fn export_pil>(vm_config: VC, path: &str, max_width: usize) +where + VC::Executor: Chip, + VC::Periphery: Chip, +{ + let chip_complex: VmChipComplex<_, _, _> = vm_config.create_chip_complex().unwrap(); + + let pil = chip_complex + .inventory + .executors() + .iter() + .filter_map(|executor| { + let air = executor.air(); + let width = air.width(); + let name = air.name(); + + if width > 
max_width { + log::warn!("Skipping {name} (width: {width})"); + return None; + } + + let columns = get_columns(air.clone()); + + let constraints = get_constraints(air); + + Some(get_pil(&name, &constraints, &columns, vec![])) + }) + .join("\n\n\n"); + + println!("Writing PIL..."); + std::fs::write(path, pil).unwrap(); + println!("Exported PIL to {path}"); +} + +fn get_columns(air: Arc>) -> Vec { + let width = air.width(); + air.columns() + .inspect(|columns| { + assert_eq!(columns.len(), width); + }) + .unwrap_or_else(|| (0..width).map(|i| format!("unknown_{i}")).collect()) +} + +fn get_constraints(air: Arc>) -> SymbolicConstraints { + let perm = default_perm(); + let security_params = SecurityParameters::standard_fast(); + let config = config_from_perm(&perm, security_params); + let air_keygen_builder = AirKeygenBuilder::new(config.pcs(), air); + let builder = air_keygen_builder.get_symbolic_builder(None); + builder.constraints() +} + +// holds basic type fields of execution objects captured in trace by subscriber +#[derive(Default)] +struct PgoData { + pc: Option, +} + +impl tracing::field::Visit for PgoData { + // when we receive a u64 field, they are parsed into fields of the pgo data + fn record_u64(&mut self, field: &tracing::field::Field, value: u64) { + if field.name() == "pc" { + self.pc = Some(value as usize); + } + } + + // required for implementation, but in practice we will only receive u64 fields + // the fields we receive are determined by the instruction trace print out of our openvm fork during execution + fn record_debug(&mut self, _: &TracingField, _: &dyn std::fmt::Debug) {} +} + +// A Layer that collects data we are interested in using for the pgo from the trace fields. +#[derive(Clone)] +struct PgoCollector { + pc: Arc>>, +} + +impl Layer for PgoCollector +where + S: Subscriber + for<'a> LookupSpan<'a>, +{ + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + // build a visitor to parse and hold trace fields we are interested in + let mut visitor = PgoData::default(); + event.record(&mut visitor); + + // because our subscriber is at the trace level, for trace print outs that don't match PgoData, + // the visitor can't parse them, and these cases are filtered out automatically + if let Some(pc) = visitor.pc { + self.pc.lock().unwrap().push(pc); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use test_log::test; + + fn compile_and_prove( + guest: &str, + apc: usize, + skip: usize, + mock: bool, + recursion: bool, + stdin: StdIn, + ) -> Result<(), Box> { + let program = compile_guest(guest, apc, skip, None).unwrap(); + prove(&program, mock, recursion, stdin) + } + + fn prove_simple(guest: &str, apc: usize, skip: usize, stdin: StdIn) { + let result = compile_and_prove(guest, apc, skip, false, false, stdin); + assert!(result.is_ok()); + } + + fn prove_mock(guest: &str, apc: usize, skip: usize, stdin: StdIn) { + let result = compile_and_prove(guest, apc, skip, true, false, stdin); + assert!(result.is_ok()); + } + + fn _prove_recursion(guest: &str, apc: usize, skip: usize, stdin: StdIn) { + let result = compile_and_prove(guest, apc, skip, false, true, stdin); + assert!(result.is_ok()); + } + + const GUEST: &str = "guest"; + const GUEST_ITER: u32 = 1 << 10; + const GUEST_APC: usize = 1; + const GUEST_SKIP: usize = 39; + const GUEST_SKIP_PGO: usize = 0; + + const GUEST_KECCAK: &str = "guest-keccak"; + const GUEST_KECCAK_ITER: u32 = 1000; + const GUEST_KECCAK_ITER_SMALL: u32 = 10; + const GUEST_KECCAK_APC: usize = 1; + const GUEST_KECCAK_SKIP: usize = 0; + 
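    // A small self-contained sketch of the pc -> pc_index mapping used by the
    // PGO path above (`pgo` / `get_pc_idx_count`): indices are relative to the
    // program's base pc and step size. The constants below are illustrative
    // and use the base pc 0x0020_0800 and step 4 that `collect_basic_blocks`
    // assumes.
    fn sketch_pc_index(pc: u32, pc_base: u32, step: u32) -> u32 {
        (pc - pc_base) / step
    }

    #[test]
    fn sketch_pc_index_example() {
        assert_eq!(sketch_pc_index(0x0020_0800, 0x0020_0800, 4), 0);
        assert_eq!(sketch_pc_index(0x0020_0810, 0x0020_0800, 4), 4);
    }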
+ #[test] + fn guest_prove_simple() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_ITER); + prove_simple(GUEST, GUEST_APC, GUEST_SKIP, stdin); + } + + #[test] + fn guest_prove_mock() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_ITER); + prove_mock(GUEST, GUEST_APC, GUEST_SKIP, stdin); + } + + // #[test] + // #[ignore = "Too much RAM"] + // // TODO: This test currently panics because the kzg params are not set up correctly. Fix this. + // #[should_panic = "No such file or directory"] + // fn guest_prove_recursion() { + // let mut stdin = StdIn::default(); + // stdin.write(&GUEST_ITER); + // prove_recursion(GUEST, GUEST_APC, GUEST_SKIP, stdin); + // } + + #[test] + fn keccak_small_prove_simple() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_KECCAK_ITER_SMALL); + prove_simple(GUEST_KECCAK, GUEST_KECCAK_APC, GUEST_KECCAK_SKIP, stdin); + } + + #[test] + #[ignore = "Too long"] + fn keccak_prove_simple() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_KECCAK_ITER); + prove_simple(GUEST_KECCAK, GUEST_KECCAK_APC, GUEST_KECCAK_SKIP, stdin); + } + + #[test] + fn keccak_small_prove_mock() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_KECCAK_ITER_SMALL); + prove_mock(GUEST_KECCAK, GUEST_KECCAK_APC, GUEST_KECCAK_SKIP, stdin); + } + + #[test] + #[ignore = "Too long"] + fn keccak_prove_mock() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_KECCAK_ITER); + prove_mock(GUEST_KECCAK, GUEST_KECCAK_APC, GUEST_KECCAK_SKIP, stdin); + } + + // #[test] + // #[ignore = "Too much RAM"] + // // TODO: This test currently panics because the kzg params are not set up correctly. Fix this. + // #[should_panic = "No such file or directory"] + // fn keccak_prove_recursion() { + // let mut stdin = StdIn::default(); + // stdin.write(&GUEST_KECCAK_ITER); + // prove_recursion(GUEST_KECCAK, GUEST_KECCAK_APC, GUEST_KECCAK_SKIP, stdin); + // } + + // The following are compilation tests only + fn test_keccak_machine(pc_idx_count: Option>) { + let machines = compile_guest( + GUEST_KECCAK, + GUEST_KECCAK_APC, + GUEST_KECCAK_SKIP, + pc_idx_count, + ) + .unwrap() + .powdr_airs_metrics(); + assert_eq!(machines.len(), 1); + let m = &machines[0]; + assert_eq!(m.width, 7786); + assert_eq!(m.constraints, 506); + assert_eq!(m.bus_interactions, 6485); + } + + #[test] + fn guest_machine() { + let machines = compile_guest(GUEST, GUEST_APC, GUEST_SKIP, None) + .unwrap() + .powdr_airs_metrics(); + assert_eq!(machines.len(), 1); + let m = &machines[0]; + // TODO we need to find a new block because this one is not executed anymore. + assert_eq!(m.width, 157); + assert_eq!(m.constraints, 36); + assert_eq!(m.bus_interactions, 120); + } + + #[test] + fn guest_machine_pgo() { + // Input via StdIn + let mut stdin = StdIn::default(); + stdin.write(&GUEST_ITER); + + // Guest machine should have more optimized results with pgo + // because we didn't accelerate the "costliest block" in the non-pgo version. + let pc_idx_count = get_pc_idx_count(GUEST, stdin); + // We don't skip any sorted basic block here to accelerate the "costliest" block. 
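// The "costliest" ordering referred to here is the one computed in
// `customize_exe::customize`: cost = execution count * block length, with
// blocks that start with a non-acceleratable opcode pushed to the end. A
// minimal standalone sketch of that ordering over illustrative
// (count, len, starts_with_no_apc_opcode) tuples:
fn sketch_sort_by_cost(blocks: &mut [(u32, u32, bool)]) {
    blocks.sort_by(|a, b| match (a.2, b.2) {
        (true, false) => std::cmp::Ordering::Greater,
        (false, true) => std::cmp::Ordering::Less,
        _ => (b.0 * b.1).cmp(&(a.0 * a.1)),
    });
}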
+ let machines = compile_guest(GUEST, GUEST_APC, GUEST_SKIP_PGO, Some(pc_idx_count)) + .unwrap() + .powdr_airs_metrics(); + assert_eq!(machines.len(), 1); + let m = &machines[0]; + assert_eq!(m.width, 68); + assert_eq!(m.constraints, 21); + assert_eq!(m.bus_interactions, 51); + } + + #[test] + fn keccak_machine() { + test_keccak_machine(None); + } + + #[test] + fn keccak_machine_pgo() { + let mut stdin = StdIn::default(); + stdin.write(&GUEST_KECCAK_ITER); + // Keccak machine should have the same results with pgo + // because we already accelerate the "costliest" block with the non-pgo version. + let pc_idx_count = get_pc_idx_count(GUEST_KECCAK, stdin); + test_keccak_machine(Some(pc_idx_count)); + } +} diff --git a/openvm/src/plonk/mod.rs b/openvm/src/plonk/mod.rs new file mode 100644 index 000000000..8a46d8ece --- /dev/null +++ b/openvm/src/plonk/mod.rs @@ -0,0 +1,31 @@ +/// A variable in a PlonK gate. +enum Variable { + /// A variable from the input constraint system. + /// At run-time, we can get the concrete values from the APC witness generation. + Witness(V), + /// A temporary variable (represented by an ID). Assuming there is at most one temporary variable in a gate, + /// we can solve for its value at run-time. + Tmp(usize), +} + +/// A PlonK gate. For each gate, the following equation must hold: +/// q_l * a + q_r * b + q_o * c + q_mul * a * b + q_const = 0 +/// where q_l, q_r, q_o, q_mul, and q_const are fixed coefficients +/// and a, b, c are variables. +/// If the same variable appears in multiple gates, a copy constraint +/// must be enforced. +struct Gate { + q_l: T, + q_r: T, + q_o: T, + q_mul: T, + q_const: T, + a: Variable, + b: Variable, + c: Variable, +} + +/// The PlonK circuit, which is just a collection of gates. +struct PlonkCircuit { + gates: Vec>, +} diff --git a/openvm/src/powdr_extension/chip.rs b/openvm/src/powdr_extension/chip.rs new file mode 100644 index 000000000..808714533 --- /dev/null +++ b/openvm/src/powdr_extension/chip.rs @@ -0,0 +1,841 @@ +// Mostly taken from [this openvm extension](https://github.com/openvm-org/openvm/blob/1b76fd5a900a7d69850ee9173969f70ef79c4c76/extensions/rv32im/circuit/src/auipc/core.rs#L1) + +use std::{ + collections::{BTreeMap, HashMap}, + sync::{Arc, Mutex}, +}; + +use crate::utils::algebraic_to_symbolic; + +use super::{ + opcode::PowdrOpcode, + vm::{OriginalInstruction, SdkVmInventory}, + PowdrPrecompile, +}; +use itertools::Itertools; +use openvm_circuit::{arch::VmConfig, system::memory::MemoryController}; +use openvm_circuit::{ + arch::{ + ExecutionState, InstructionExecutor, Result as ExecutionResult, VmChipComplex, + VmInventoryError, + }, + system::memory::OfflineMemory, + utils::next_power_of_two_or_zero, +}; +use openvm_circuit_primitives::{ + bitwise_op_lookup::SharedBitwiseOperationLookupChip, range_tuple::SharedRangeTupleCheckerChip, + var_range::SharedVariableRangeCheckerChip, +}; +use openvm_instructions::{instruction::Instruction, LocalOpcode}; +use openvm_native_circuit::CastFExtension; +use openvm_sdk::config::{SdkVmConfig, SdkVmConfigExecutor, SdkVmConfigPeriphery}; +use openvm_stark_backend::{ + air_builders::symbolic::{ + symbolic_expression::{SymbolicEvaluator, SymbolicExpression}, + symbolic_variable::{Entry, SymbolicVariable}, + }, + interaction::BusIndex, + p3_air::{Air, BaseAir}, + p3_field::FieldAlgebra, + p3_matrix::dense::RowMajorMatrix, + p3_maybe_rayon::prelude::{ + IndexedParallelIterator, IntoParallelIterator, ParallelIterator, ParallelSliceMut, + }, + rap::ColumnsAir, +}; + +use 
openvm_stark_backend::{ + config::{StarkGenericConfig, Val}, + interaction::InteractionBuilder, + p3_field::{Field, PrimeField32}, + p3_matrix::Matrix, + prover::types::AirProofInput, + rap::{AnyRap, BaseAirWithPublicValues, PartitionedBaseAir}, + Chip, ChipUsageGetter, +}; +use powdr_autoprecompiles::powdr::{Column, UniqueColumns}; +use serde::{Deserialize, Serialize}; + +pub struct PowdrChip { + pub name: String, + pub opcode: PowdrOpcode, + /// An "executor" for this chip, based on the original instructions in the basic block + pub executor: PowdrExecutor, + pub air: Arc>, + pub periphery: SharedChips, +} + +// Extracted from openvm, extended to create an inventory with the correct memory +fn create_chip_complex_with_memory( + memory: Arc>>, + range_checker: SharedVariableRangeCheckerChip, + base_config: SdkVmConfig, +) -> std::result::Result< + VmChipComplex, SdkVmConfigPeriphery>, + VmInventoryError, +> { + use openvm_keccak256_circuit::Keccak256; + use openvm_native_circuit::Native; + use openvm_rv32im_circuit::{Rv32I, Rv32Io}; + use openvm_sha256_circuit::Sha256; + + let this = base_config; + let mut complex = this.system.config.create_chip_complex()?.transmute(); + + // CHANGE: inject the correct memory here to be passed to the chips, to be accessible in their get_proof_input + complex.base.memory_controller.offline_memory = memory.clone(); + complex.base.range_checker_chip = range_checker; + // END CHANGE + + if this.rv32i.is_some() { + complex = complex.extend(&Rv32I)?; + } + if this.io.is_some() { + complex = complex.extend(&Rv32Io)?; + } + if this.keccak.is_some() { + complex = complex.extend(&Keccak256)?; + } + if this.sha256.is_some() { + complex = complex.extend(&Sha256)?; + } + if this.native.is_some() { + complex = complex.extend(&Native)?; + } + if this.castf.is_some() { + complex = complex.extend(&CastFExtension)?; + } + + if let Some(rv32m) = this.rv32m { + let mut rv32m = rv32m; + if let Some(ref bigint) = this.bigint { + rv32m.range_tuple_checker_sizes[0] = + rv32m.range_tuple_checker_sizes[0].max(bigint.range_tuple_checker_sizes[0]); + rv32m.range_tuple_checker_sizes[1] = + rv32m.range_tuple_checker_sizes[1].max(bigint.range_tuple_checker_sizes[1]); + } + complex = complex.extend(&rv32m)?; + } + if let Some(bigint) = this.bigint { + let mut bigint = bigint; + if let Some(ref rv32m) = this.rv32m { + bigint.range_tuple_checker_sizes[0] = + rv32m.range_tuple_checker_sizes[0].max(bigint.range_tuple_checker_sizes[0]); + bigint.range_tuple_checker_sizes[1] = + rv32m.range_tuple_checker_sizes[1].max(bigint.range_tuple_checker_sizes[1]); + } + complex = complex.extend(&bigint)?; + } + if let Some(ref modular) = this.modular { + complex = complex.extend(modular)?; + } + if let Some(ref fp2) = this.fp2 { + complex = complex.extend(fp2)?; + } + if let Some(ref pairing) = this.pairing { + complex = complex.extend(pairing)?; + } + if let Some(ref ecc) = this.ecc { + complex = complex.extend(ecc)?; + } + + Ok(complex) +} + +/// A struct which holds the state of the execution based on the original instructions in this block and a dummy inventory. 
+pub struct PowdrExecutor { + instructions: Vec>, + air_by_opcode_id: BTreeMap>, + is_valid_poly_id: u64, + inventory: SdkVmInventory, + current_trace_height: usize, +} + +impl PowdrExecutor { + fn new( + instructions: Vec>, + air_by_opcode_id: BTreeMap>, + is_valid_column: Column, + memory: Arc>>, + range_checker: &SharedVariableRangeCheckerChip, + base_config: SdkVmConfig, + ) -> Self { + Self { + instructions, + air_by_opcode_id, + is_valid_poly_id: is_valid_column.id.id, + inventory: create_chip_complex_with_memory( + memory, + range_checker.clone(), + base_config.clone(), + ) + .unwrap() + .inventory, + current_trace_height: 0, + } + } + + fn execute( + &mut self, + memory: &mut MemoryController, + from_state: ExecutionState, + ) -> ExecutionResult> { + // execute the original instructions one by one + let res = self + .instructions + .iter() + .try_fold(from_state, |execution_state, instruction| { + let executor = self + .inventory + .get_mut_executor(&instruction.opcode()) + .unwrap(); + executor.execute(memory, instruction.as_ref(), execution_state) + }); + + self.current_trace_height += 1; + + res + } +} + +/// The shared chips which can be used by the PowdrChip. +pub struct SharedChips { + bitwise_lookup_8: SharedBitwiseOperationLookupChip<8>, + range_checker: SharedVariableRangeCheckerChip, + tuple_range_checker: Option>, +} + +impl SharedChips { + pub fn new( + bitwise_lookup_8: SharedBitwiseOperationLookupChip<8>, + range_checker: SharedVariableRangeCheckerChip, + tuple_range_checker: Option>, + ) -> Self { + Self { + bitwise_lookup_8, + range_checker, + tuple_range_checker, + } + } +} + +impl SharedChips { + /// Sends concrete values to the shared chips using a given bus id. + /// Panics if the bus id doesn't match any of the chips' bus ids. + fn apply(&self, bus_id: u16, mult: u32, args: &[u32]) { + match bus_id { + id if id == self.bitwise_lookup_8.bus().inner.index => { + // bitwise operation lookup + // interpret the arguments, see `Air for BitwiseOperationLookupAir` + let [x, y, x_xor_y, selector] = args.try_into().unwrap(); + + for _ in 0..mult { + match selector { + 0 => { + self.bitwise_lookup_8.request_range(x, y); + } + 1 => { + let res = self.bitwise_lookup_8.request_xor(x, y); + debug_assert_eq!(res, x_xor_y); + } + _ => { + unreachable!("Invalid selector"); + } + } + } + } + id if id == self.range_checker.bus().index() => { + // interpret the arguments, see `Air for VariableRangeCheckerAir` + let [value, max_bits] = args.try_into().unwrap(); + + for _ in 0..mult { + self.range_checker.add_count(value, max_bits as usize); + } + } + id if Some(id) + == self + .tuple_range_checker + .as_ref() + .map(|c| c.bus().inner.index) => + { + // tuple range checker + // We pass a slice. It is checked inside `add_count`. 
+ for _ in 0..mult { + self.tuple_range_checker.as_ref().unwrap().add_count(args); + } + } + 0..=2 => { + // execution bridge, memory, pc lookup + // do nothing + } + _ => { + unreachable!("Bus interaction {} not implemented", bus_id); + } + } + } +} + +impl PowdrChip { + pub(crate) fn new( + precompile: PowdrPrecompile, + memory: Arc>>, + base_config: SdkVmConfig, + periphery: SharedChips, + ) -> Self { + let air: PowdrAir = PowdrAir::new(precompile.machine); + let original_airs = precompile + .original_airs + .into_iter() + .map(|(k, v)| (k, v.into())) + .collect(); + let executor = PowdrExecutor::new( + precompile.original_instructions, + original_airs, + precompile.is_valid_column, + memory, + &periphery.range_checker, + base_config, + ); + let name = precompile.name; + let opcode = precompile.opcode; + + Self { + name, + opcode, + air: Arc::new(air), + executor, + periphery, + } + } + + /// Returns the index of the is_valid of this air. + fn get_is_valid_index(&self) -> usize { + self.air.column_index_by_poly_id[&self.executor.is_valid_poly_id] + } +} + +impl InstructionExecutor for PowdrChip { + fn execute( + &mut self, + memory: &mut MemoryController, + instruction: &Instruction, + from_state: ExecutionState, + ) -> ExecutionResult> { + let &Instruction { opcode, .. } = instruction; + assert_eq!(opcode.as_usize(), self.opcode.global_opcode().as_usize()); + + let execution_state = self.executor.execute(memory, from_state)?; + + Ok(execution_state) + } + + fn get_opcode_name(&self, _: usize) -> String { + self.name.clone() + } +} + +impl ChipUsageGetter for PowdrChip { + fn air_name(&self) -> String { + format!("powdr_air_for_opcode_{}", self.opcode.global_opcode()).to_string() + } + fn current_trace_height(&self) -> usize { + self.executor.current_trace_height + } + + fn trace_width(&self) -> usize { + self.air.width() + } +} + +impl Chip for PowdrChip> +where + Val: PrimeField32, +{ + fn air(&self) -> Arc> { + self.air.clone() + } + + fn generate_air_proof_input(self) -> AirProofInput { + tracing::trace!("Generating air proof input for PowdrChip {}", self.name); + + let is_valid_index = self.get_is_valid_index(); + let num_records = self.current_trace_height(); + let height = next_power_of_two_or_zero(num_records); + let width = self.air.width(); + let mut values = Val::::zero_vec(height * width); + + // for each original opcode, the name of the dummy air it corresponds to + let air_name_by_opcode = self + .executor + .instructions + .iter() + .map(|instruction| instruction.opcode()) + .unique() + .map(|opcode| { + ( + opcode, + self.executor + .inventory + .get_executor(opcode) + .unwrap() + .air_name(), + ) + }) + .collect::>(); + + let dummy_trace_by_air_name: HashMap<_, _> = self + .executor + .inventory + .executors + .into_iter() + .map(|executor| { + ( + executor.air_name(), + Chip::::generate_air_proof_input(executor) + .raw + .common_main + .unwrap(), + ) + }) + .collect(); + + let instruction_index_to_table_offset = self + .executor + .instructions + .iter() + .enumerate() + .scan( + HashMap::default(), + |counts: &mut HashMap<&str, usize>, (index, instruction)| { + let air_name = air_name_by_opcode.get(&instruction.opcode()).unwrap(); + let count = counts.entry(air_name).or_default(); + let current_count = *count; + *count += 1; + Some((index, (air_name, current_count))) + }, + ) + .collect::>(); + + let occurrences_by_table_name: HashMap<&String, usize> = self + .executor + .instructions + .iter() + .map(|instruction| 
air_name_by_opcode.get(&instruction.opcode()).unwrap()) + .counts(); + + // A vector of HashMap by instruction, empty HashMap if none maps to apc + let dummy_trace_index_to_apc_index_by_instruction: Vec> = self + .executor + .instructions + .iter() + .map(|instruction| { + // look up how many dummy‐cells this AIR produces: + let air_width = dummy_trace_by_air_name + .get(air_name_by_opcode.get(&instruction.opcode()).unwrap()) + .unwrap() + .width(); + + // build a map only of the (dummy_index -> apc_index) pairs + let mut map = HashMap::with_capacity(air_width); + for dummy_trace_index in 0..air_width { + if let Ok(apc_index) = global_index( + dummy_trace_index, + instruction, + &self.air.column_index_by_poly_id, + ) { + if map.insert(dummy_trace_index, apc_index).is_some() { + panic!( + "duplicate dummy_trace_index {} for instruction opcode {:?}", + dummy_trace_index, + instruction.opcode() + ); + } + } + } + map + }) + .collect(); + + assert_eq!( + self.executor.instructions.len(), + dummy_trace_index_to_apc_index_by_instruction.len() + ); + + let dummy_values = (0..num_records).into_par_iter().map(|record_index| { + (0..self.executor.instructions.len()) + .map(|index| { + // get the air name and offset for this instruction (by index) + let (air_name, offset) = instruction_index_to_table_offset.get(&index).unwrap(); + // get the table + let table = dummy_trace_by_air_name.get(*air_name).unwrap(); + // get how many times this table is used per record + let occurrences_per_record = occurrences_by_table_name.get(air_name).unwrap(); + // get the width of each occurrence + let width = table.width(); + // start after the previous record ended, and offset by the correct offset + let start = (record_index * occurrences_per_record + offset) * width; + // end at the start + width + let end = start + width; + &table.values[start..end] + }) + .collect_vec() + }); + + // go through the final table and fill in the values + values + // a record is `width` values + .par_chunks_mut(width) + .zip(dummy_values) + .for_each(|(row_slice, dummy_values)| { + // map the dummy rows to the autoprecompile row + for (instruction_id, (instruction, dummy_row)) in self + .executor + .instructions + .iter() + .zip_eq(dummy_values) + .enumerate() + { + let evaluator = RowEvaluator::new(dummy_row, None); + + // first remove the side effects of this row on the main periphery + for range_checker_send in self + .executor + .air_by_opcode_id + .get(&instruction.as_ref().opcode.as_usize()) + .unwrap() + .bus_interactions + .iter() + .filter(|i| i.id == 3) + { + let mult = evaluator + .eval_expr(&range_checker_send.mult) + .as_canonical_u32(); + let args = range_checker_send + .args + .iter() + .map(|arg| evaluator.eval_expr(arg).as_canonical_u32()) + .collect_vec(); + let [value, max_bits] = args.try_into().unwrap(); + for _ in 0..mult { + self.periphery + .range_checker + .remove_count(value, max_bits as usize); + } + } + + write_dummy_to_autoprecompile_row( + row_slice, + dummy_row, + &dummy_trace_index_to_apc_index_by_instruction[instruction_id], + ); + } + + // Set the is_valid column to 1 + row_slice[is_valid_index] = >::ONE; + + let evaluator = + RowEvaluator::new(row_slice, Some(&self.air.column_index_by_poly_id)); + + // replay the side effects of this row on the main periphery + for bus_interaction in self.air.machine.bus_interactions.iter() { + let mult = evaluator + .eval_expr(&bus_interaction.mult) + .as_canonical_u32(); + let args = bus_interaction + .args + .iter() + .map(|arg| 
evaluator.eval_expr(arg).as_canonical_u32()) + .collect_vec(); + + self.periphery.apply(bus_interaction.id, mult, &args); + } + }); + + let trace = RowMajorMatrix::new(values, width); + + AirProofInput::simple(trace, vec![]) + } +} + +fn write_dummy_to_autoprecompile_row( + row_slice: &mut [F], + dummy_row: &[F], + dummy_trace_index_to_apc_index: &HashMap, +) { + for (dummy_trace_index, apc_index) in dummy_trace_index_to_apc_index { + row_slice[*apc_index] = dummy_row[*dummy_trace_index]; + } +} + +enum IndexError { + NotInDummy, + NotInAutoprecompile, +} + +/// Maps the index of a column in the original AIR of a given instruction to the corresponding +/// index in the autoprecompile AIR. +fn global_index( + local_index: usize, + instruction: &OriginalInstruction, + autoprecompile_index_by_poly_id: &BTreeMap, +) -> Result { + // Map to the poly_id in the original instruction to the poly_id in the autoprecompile. + let autoprecompile_poly_id = instruction + .subs + .get(local_index) + .ok_or(IndexError::NotInDummy)?; + // Map to the index in the autoprecompile. + let variable_index = autoprecompile_index_by_poly_id + .get(autoprecompile_poly_id) + .ok_or(IndexError::NotInAutoprecompile)?; + Ok(*variable_index) +} + +pub struct PowdrAir { + /// The columns in arbitrary order + columns: Vec, + /// The mapping from poly_id id to the index in the list of columns. + /// The values are always unique and contiguous + column_index_by_poly_id: BTreeMap, + machine: SymbolicMachine, +} + +impl ColumnsAir for PowdrAir { + fn columns(&self) -> Option> { + Some(self.columns.iter().map(|c| c.name.clone()).collect()) + } +} + +pub struct RowEvaluator<'a, F: PrimeField32> { + pub row: &'a [F], + pub witness_id_to_index: Option<&'a BTreeMap>, +} + +impl<'a, F: PrimeField32> RowEvaluator<'a, F> { + pub fn new(row: &'a [F], witness_id_to_index: Option<&'a BTreeMap>) -> Self { + Self { + row, + witness_id_to_index, + } + } +} + +impl SymbolicEvaluator for RowEvaluator<'_, F> { + fn eval_const(&self, c: F) -> F { + c + } + + fn eval_var(&self, symbolic_var: SymbolicVariable) -> F { + match symbolic_var.entry { + Entry::Main { + part_index: 0, + offset: 0, + } => { + let index = if let Some(witness_id_to_index) = self.witness_id_to_index { + witness_id_to_index[&(symbolic_var.index as u64)] + } else { + symbolic_var.index + }; + self.row[index] + } + // currently only the current rotation of the main is supported + // next rotation is not supported because this is a single row evaluator + _ => unreachable!(), + } + } + fn eval_is_first_row(&self) -> F { + unreachable!() + } + fn eval_is_last_row(&self) -> F { + unreachable!() + } + fn eval_is_transition(&self) -> F { + unreachable!() + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(bound = "F: Field")] +pub struct SymbolicMachine { + columns: Vec, + constraints: Vec>, + bus_interactions: Vec>, +} + +impl From> for SymbolicMachine { + fn from(machine: powdr_autoprecompiles::SymbolicMachine) -> Self { + let columns = machine.unique_columns().collect(); + + let powdr_autoprecompiles::SymbolicMachine { + constraints, + bus_interactions, + } = machine; + Self { + columns, + constraints: constraints + .into_iter() + .map(SymbolicConstraint::from) + .collect(), + bus_interactions: bus_interactions + .into_iter() + .map(SymbolicBusInteraction::from) + .collect(), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(bound = "F: Field")] +struct SymbolicConstraint { + expr: SymbolicExpression, +} + +impl From> for SymbolicConstraint { + 
fn from(constraint: powdr_autoprecompiles::SymbolicConstraint) -> Self { + let powdr_autoprecompiles::SymbolicConstraint { expr } = constraint; + Self { + expr: algebraic_to_symbolic(&expr), + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(bound = "F: Field")] +struct SymbolicBusInteraction { + id: BusIndex, + mult: SymbolicExpression, + args: Vec>, + count_weight: u32, +} + +impl From> + for SymbolicBusInteraction +{ + fn from(bus_interaction: powdr_autoprecompiles::SymbolicBusInteraction) -> Self { + let powdr_autoprecompiles::SymbolicBusInteraction { id, mult, args, .. } = bus_interaction; + let mult = algebraic_to_symbolic(&mult); + let args = args.iter().map(algebraic_to_symbolic).collect(); + Self { + id: id as BusIndex, + mult, + args, + // TODO: Is this correct? + count_weight: 1, + } + } +} + +impl PowdrAir { + pub fn new(machine: powdr_autoprecompiles::SymbolicMachine) -> Self { + let (column_index_by_poly_id, columns): (BTreeMap<_, _>, Vec<_>) = machine + .unique_columns() + .enumerate() + .map(|(index, c)| ((c.id.id, index), c.clone())) + .unzip(); + + Self { + columns, + column_index_by_poly_id, + machine: machine.into(), + } + } +} + +impl BaseAir for PowdrAir { + fn width(&self) -> usize { + let res = self.columns.len(); + assert!(res > 0); + res + } +} + +// No public values, but the trait is implemented +impl BaseAirWithPublicValues for PowdrAir {} + +impl Air for PowdrAir +where + AB::F: PrimeField32, +{ + fn eval(&self, builder: &mut AB) { + let main = builder.main(); + let witnesses = main.row_slice(0); + // TODO: cache? + let witness_values: BTreeMap = self + .columns + .iter() + .map(|c| c.id.id) + .zip_eq(witnesses.iter().cloned()) + .collect(); + + let witness_evaluator = WitnessEvaluator::::new(&witness_values); + + for constraint in &self.machine.constraints { + let e = witness_evaluator.eval_expr(&constraint.expr); + builder.assert_zero(e); + } + + for interaction in &self.machine.bus_interactions { + let SymbolicBusInteraction { + id, + mult, + args, + count_weight, + } = interaction; + + let mult = witness_evaluator.eval_expr(mult); + let args = args + .iter() + .map(|arg| witness_evaluator.eval_expr(arg)) + .collect_vec(); + + builder.push_interaction(*id, args, mult, *count_weight); + } + } +} + +pub struct WitnessEvaluator<'a, AB: InteractionBuilder> { + pub witness: &'a BTreeMap, +} + +impl<'a, AB: InteractionBuilder> WitnessEvaluator<'a, AB> { + pub fn new(witness: &'a BTreeMap) -> Self { + Self { witness } + } +} + +impl SymbolicEvaluator for WitnessEvaluator<'_, AB> { + fn eval_const(&self, c: AB::F) -> AB::Expr { + c.into() + } + + fn eval_var(&self, symbolic_var: SymbolicVariable) -> AB::Expr { + match symbolic_var.entry { + Entry::Main { part_index, offset } => { + assert_eq!(part_index, 0); + assert_eq!(offset, 0); + (*self.witness.get(&(symbolic_var.index as u64)).unwrap()).into() + } + Entry::Public => unreachable!("Public variables are not supported"), + Entry::Challenge => unreachable!("Challenges are not supported"), + Entry::Exposed => unreachable!("Exposed values are not supported"), + Entry::Preprocessed { .. } => { + unimplemented!("Preprocessed values are not supported yet") + } + Entry::Permutation { .. 
} => unreachable!("Permutation values are not supported"), + } + } + + fn eval_is_first_row(&self) -> AB::Expr { + unimplemented!() + } + + fn eval_is_last_row(&self) -> AB::Expr { + unimplemented!() + } + + fn eval_is_transition(&self) -> AB::Expr { + unimplemented!() + } +} + +impl PartitionedBaseAir for PowdrAir {} diff --git a/openvm/src/powdr_extension/mod.rs b/openvm/src/powdr_extension/mod.rs new file mode 100644 index 000000000..354e3b393 --- /dev/null +++ b/openvm/src/powdr_extension/mod.rs @@ -0,0 +1,9 @@ +/// The core logic of our extension +pub mod chip; +/// The opcodes for the powdr instructions, which is used in the chip implementation and contains the opcode ID +pub mod opcode; +/// The integration of our extension with the VM +mod vm; + +pub use opcode::PowdrOpcode; +pub use vm::{OriginalInstruction, PowdrExecutor, PowdrExtension, PowdrPeriphery, PowdrPrecompile}; diff --git a/openvm/src/powdr_extension/opcode.rs b/openvm/src/powdr_extension/opcode.rs new file mode 100644 index 000000000..37edff16b --- /dev/null +++ b/openvm/src/powdr_extension/opcode.rs @@ -0,0 +1,29 @@ +use openvm_instructions::LocalOpcode; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] +pub struct PowdrOpcode { + pub class_offset: usize, +} + +impl LocalOpcode for PowdrOpcode { + // This offset must not be accessed, since we want many opcodes of the same type to have different class_offsets. + // This is because each opcode has its own air. + const CLASS_OFFSET: usize = unreachable!(); + + fn from_usize(value: usize) -> Self { + Self { + class_offset: value, + } + } + + // The local offset is always 0, since we want to have many opcodes over the same air. + fn local_usize(&self) -> usize { + 0 + } + + // The global opcode is based on `class_offset`, *NOT* on the static `CLASS_OFFSET`. 
+ fn global_opcode(&self) -> openvm_instructions::VmOpcode { + openvm_instructions::VmOpcode::from_usize(self.class_offset) + } +} diff --git a/openvm/src/powdr_extension/vm.rs b/openvm/src/powdr_extension/vm.rs new file mode 100644 index 000000000..df4871a24 --- /dev/null +++ b/openvm/src/powdr_extension/vm.rs @@ -0,0 +1,156 @@ +// Mostly taken from [this openvm extension](https://github.com/openvm-org/openvm/blob/1b76fd5a900a7d69850ee9173969f70ef79c4c76/extensions/rv32im/circuit/src/extension.rs#L185) and simplified to only handle a single opcode with its necessary dependencies + +use std::collections::BTreeMap; +use std::iter::once; + +use derive_more::From; + +use openvm_circuit::arch::VmInventoryError; +use openvm_circuit::{ + arch::{VmExtension, VmInventory}, + circuit_derive::{Chip, ChipUsageGetter}, + derive::{AnyEnum, InstructionExecutor}, + system::phantom::PhantomChip, +}; +use openvm_circuit_primitives::bitwise_op_lookup::SharedBitwiseOperationLookupChip; +use openvm_circuit_primitives::range_tuple::SharedRangeTupleCheckerChip; +use openvm_circuit_primitives::var_range::SharedVariableRangeCheckerChip; +use openvm_instructions::VmOpcode; +use openvm_instructions::{instruction::Instruction, LocalOpcode}; +use openvm_sdk::config::{SdkVmConfig, SdkVmConfigExecutor, SdkVmConfigPeriphery}; +use openvm_stark_backend::p3_field::{Field, PrimeField32}; +use powdr_autoprecompiles::powdr::Column; +use powdr_autoprecompiles::SymbolicMachine; +use serde::{Deserialize, Serialize}; + +use super::chip::SharedChips; +use super::{chip::PowdrChip, PowdrOpcode}; + +pub type SdkVmInventory = VmInventory, SdkVmConfigPeriphery>; + +#[derive(Clone, Deserialize, Serialize)] +#[serde(bound = "F: Field")] +pub struct PowdrExtension { + pub precompiles: Vec>, + pub base_config: SdkVmConfig, +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct OriginalInstruction { + pub instruction: Instruction, + /// The autoprecompile poly_ids that the instruction points to, in the same order as the corresponding original columns + pub subs: Vec, +} + +impl OriginalInstruction { + pub fn new(instruction: Instruction, subs: Vec) -> Self { + Self { instruction, subs } + } + + pub fn opcode(&self) -> VmOpcode { + self.instruction.opcode + } +} + +impl AsRef> for OriginalInstruction { + fn as_ref(&self) -> &Instruction { + &self.instruction + } +} + +#[derive(Clone, Serialize, Deserialize)] +#[serde(bound = "F: Field")] +pub struct PowdrPrecompile { + pub name: String, + pub opcode: PowdrOpcode, + pub machine: SymbolicMachine, + pub original_instructions: Vec>, + pub original_airs: BTreeMap>, + pub is_valid_column: Column, +} + +impl PowdrPrecompile { + pub fn new( + name: String, + opcode: PowdrOpcode, + machine: SymbolicMachine, + original_instructions: Vec>, + original_airs: BTreeMap>, + is_valid_column: Column, + ) -> Self { + Self { + name, + opcode, + machine, + original_instructions, + original_airs, + is_valid_column, + } + } +} + +impl PowdrExtension { + pub fn new(precompiles: Vec>, base_config: SdkVmConfig) -> Self { + Self { + precompiles, + base_config, + } + } +} + +#[derive(ChipUsageGetter, Chip, InstructionExecutor, From, AnyEnum)] +pub enum PowdrExecutor { + Powdr(PowdrChip), +} + +#[derive(From, ChipUsageGetter, Chip, AnyEnum)] +pub enum PowdrPeriphery { + Sdk(SdkVmConfigPeriphery), + Phantom(PhantomChip), +} + +impl VmExtension for PowdrExtension { + type Executor = PowdrExecutor; + + type Periphery = PowdrPeriphery; + + fn build( + &self, + builder: &mut openvm_circuit::arch::VmInventoryBuilder, + ) 
-> Result, VmInventoryError> { + let mut inventory = VmInventory::new(); + + let offline_memory = builder.system_base().offline_memory(); + + // TODO: here we make assumptions about the existence of some chips in the periphery. Make this more flexible + let bitwise_lookup = *builder + .find_chip::>() + .first() + .unwrap(); + let range_checker = *builder + .find_chip::() + .first() + .unwrap(); + let tuple_range_checker = builder + .find_chip::>() + .first() + .cloned(); + + for precompile in &self.precompiles { + let powdr_chip: PowdrChip = PowdrChip::new( + precompile.clone(), + offline_memory.clone(), + self.base_config.clone(), + SharedChips::new( + bitwise_lookup.clone(), + range_checker.clone(), + tuple_range_checker.cloned(), + ), + ); + + inventory.add_executor(powdr_chip, once(precompile.opcode.global_opcode()))?; + } + + Ok(inventory) + } +} diff --git a/openvm/src/utils.rs b/openvm/src/utils.rs new file mode 100644 index 000000000..6062ca1a5 --- /dev/null +++ b/openvm/src/utils.rs @@ -0,0 +1,299 @@ +use std::{collections::BTreeMap, sync::Arc}; + +use itertools::Itertools; +use openvm_stark_backend::{ + air_builders::symbolic::{ + symbolic_expression::SymbolicExpression, + symbolic_variable::{Entry, SymbolicVariable}, + SymbolicConstraints, + }, + interaction::Interaction, + p3_field::PrimeField32, +}; +use powdr::number::FieldElement; +use powdr::{ + ast::analyzed::{ + AlgebraicBinaryOperation, AlgebraicBinaryOperator, AlgebraicExpression, AlgebraicReference, + AlgebraicUnaryOperation, AlgebraicUnaryOperator, PolyID, PolynomialType, + }, + number::BabyBearField, +}; + +pub fn algebraic_to_symbolic( + expr: &AlgebraicExpression, +) -> SymbolicExpression { + match expr { + AlgebraicExpression::Number(n) => SymbolicExpression::Constant(*n), + AlgebraicExpression::BinaryOperation(binary) => match binary.op { + AlgebraicBinaryOperator::Add => SymbolicExpression::Add { + x: Arc::new(algebraic_to_symbolic(&binary.left)), + y: Arc::new(algebraic_to_symbolic(&binary.right)), + degree_multiple: 0, + }, + AlgebraicBinaryOperator::Sub => SymbolicExpression::Sub { + x: Arc::new(algebraic_to_symbolic(&binary.left)), + y: Arc::new(algebraic_to_symbolic(&binary.right)), + degree_multiple: 0, + }, + AlgebraicBinaryOperator::Mul => SymbolicExpression::Mul { + x: Arc::new(algebraic_to_symbolic(&binary.left)), + y: Arc::new(algebraic_to_symbolic(&binary.right)), + degree_multiple: 0, + }, + AlgebraicBinaryOperator::Pow => { + // Assuming the right operand is a constant number + let base = algebraic_to_symbolic(&binary.left); + let exp = match *binary.right { + AlgebraicExpression::Number(n) => n, + _ => unimplemented!(), + }; + + if exp == T::ZERO { + SymbolicExpression::Constant(T::ONE) + } else { + let mut result = base.clone(); + let mut remaining = exp - T::ONE; + + while remaining != T::ZERO { + result = SymbolicExpression::Mul { + x: Arc::new(result), + y: Arc::new(base.clone()), + degree_multiple: 0, + }; + remaining -= T::ONE; + } + result + } + } + }, + AlgebraicExpression::UnaryOperation(unary) => match unary.op { + AlgebraicUnaryOperator::Minus => SymbolicExpression::Neg { + x: Arc::new(algebraic_to_symbolic(&unary.expr)), + degree_multiple: 0, + }, + }, + AlgebraicExpression::Reference(algebraic_reference) => { + let poly_id = algebraic_reference.poly_id; + let next = algebraic_reference.next as usize; + match poly_id.ptype { + PolynomialType::Committed => SymbolicExpression::Variable(SymbolicVariable::new( + Entry::Main { + part_index: 0, + offset: next, + }, + poly_id.id as usize, + 
)), + PolynomialType::Constant => SymbolicExpression::Variable(SymbolicVariable::new( + Entry::Preprocessed { offset: next }, + poly_id.id as usize, + )), + PolynomialType::Intermediate => todo!(), + } + } + AlgebraicExpression::PublicReference(_) => { + unimplemented!() + } + AlgebraicExpression::Challenge(ch) => SymbolicExpression::Variable(SymbolicVariable::new( + Entry::Challenge, + ch.id.try_into().unwrap(), + )), + } +} +pub fn symbolic_to_algebraic( + expr: &SymbolicExpression, + columns: &[String], +) -> AlgebraicExpression
{ + match expr { + SymbolicExpression::Constant(c) => { + AlgebraicExpression::Number(P::from_bytes_le(&c.as_canonical_u32().to_le_bytes())) + } + SymbolicExpression::Add { x, y, .. } => { + AlgebraicExpression::BinaryOperation(AlgebraicBinaryOperation { + left: Box::new(symbolic_to_algebraic(x, columns)), + right: Box::new(symbolic_to_algebraic(y, columns)), + op: AlgebraicBinaryOperator::Add, + }) + } + SymbolicExpression::Sub { x, y, .. } => { + AlgebraicExpression::BinaryOperation(AlgebraicBinaryOperation { + left: Box::new(symbolic_to_algebraic(x, columns)), + right: Box::new(symbolic_to_algebraic(y, columns)), + op: AlgebraicBinaryOperator::Sub, + }) + } + SymbolicExpression::Mul { x, y, .. } => { + AlgebraicExpression::BinaryOperation(AlgebraicBinaryOperation { + left: Box::new(symbolic_to_algebraic(x, columns)), + right: Box::new(symbolic_to_algebraic(y, columns)), + op: AlgebraicBinaryOperator::Mul, + }) + } + SymbolicExpression::Neg { x, .. } => { + AlgebraicExpression::UnaryOperation(AlgebraicUnaryOperation { + expr: Box::new(symbolic_to_algebraic(x, columns)), + op: AlgebraicUnaryOperator::Minus, + }) + } + SymbolicExpression::Variable(SymbolicVariable { entry, index, .. }) => match entry { + Entry::Main { offset, part_index } => { + assert_eq!(*part_index, 0); + let next = match offset { + 0 => false, + 1 => true, + _ => unimplemented!(), + }; + let name = columns.get(*index).unwrap_or_else(|| { + panic!("Column index out of bounds: {index}\nColumns: {columns:?}"); + }); + AlgebraicExpression::Reference(AlgebraicReference { + name: name.clone(), + poly_id: PolyID { + id: *index as u64, + ptype: PolynomialType::Committed, + }, + next, + }) + } + _ => unimplemented!(), + }, + SymbolicExpression::IsFirstRow => AlgebraicExpression::Reference(AlgebraicReference { + name: "is_first_row".to_string(), + poly_id: PolyID { + id: 0, + ptype: PolynomialType::Constant, + }, + next: false, + }), + SymbolicExpression::IsLastRow => AlgebraicExpression::Reference(AlgebraicReference { + name: "is_last_row".to_string(), + poly_id: PolyID { + id: 1, + ptype: PolynomialType::Constant, + }, + next: false, + }), + SymbolicExpression::IsTransition => AlgebraicExpression::Reference(AlgebraicReference { + name: "is_transition".to_string(), + poly_id: PolyID { + id: 2, + ptype: PolynomialType::Constant, + }, + next: false, + }), + } +} + +pub fn get_pil( + name: &str, + constraints: &SymbolicConstraints, + columns: &Vec, + public_values: Vec, +) -> String { + let mut pil = format!( + " +namespace {name}; + // Preamble + col fixed is_first_row = [1] + [0]*; + col fixed is_last_row = [0] + [1]*; + col fixed is_transition = [0] + [1]* + [0]; + +" + ); + + let bus_id_to_name = [ + (0, "EXECUTION_BRIDGE"), + (1, "MEMORY"), + (2, "PC_LOOKUP"), + (3, "VARIABLE_RANGE_CHECKER"), + (6, "BITWISE_LOOKUP"), + (7, "TUPLE_RANGE_CHECKER"), + ] + .into_iter() + .collect::>(); + + pil.push_str( + &bus_id_to_name + .iter() + .map(|(id, name)| format!(" let {name} = {id};")) + .join("\n"), + ); + + pil.push_str( + " + + // Witness columns +", + ); + + // Declare witness columns + for column in columns { + pil.push_str(&format!(" col witness {column};\n")); + } + + let bus_interactions_by_bus = constraints + .interactions + .iter() + .map(|interaction| (interaction.bus_index, interaction)) + .into_group_map() + .into_iter() + // Use BTreeMap to sort by bus_index + .collect::>(); + + pil.push_str( + " + // Bus interactions (bus_index, fields, count)\n", + ); + + for (bus_index, interactions) in bus_interactions_by_bus { + 
let bus_name = bus_id_to_name + .get(&bus_index) + .unwrap_or_else(|| panic!("Bus index {bus_index} not found in bus_id_to_name")); + + for interaction in interactions { + format_bus_interaction(&mut pil, interaction, columns, &public_values, bus_name); + } + pil.push('\n'); + } + + pil.push_str(" // Constraints\n"); + + for constraint in &constraints.constraints { + pil.push_str(&format!( + " {} = 0;\n", + format_expr(constraint, columns, &public_values) + )); + } + pil +} + +fn format_bus_interaction( + pil: &mut String, + interaction: &Interaction>, + columns: &[String], + public_values: &[String], + bus_name: &str, +) { + let Interaction { message, count, .. } = interaction; + // We do not know what is a send or a receive + let function_name = "bus_interaction"; + + pil.push_str(&format!( + " std::protocols::bus::{}({bus_name}, [{}], {});\n", + function_name, + message + .iter() + .map(|value| format_expr(value, columns, public_values)) + .collect::>() + .join(", "), + format_expr(count, columns, public_values) + )); +} + +fn format_expr( + expr: &SymbolicExpression, + columns: &[String], + // TODO: Implement public references + _public_values: &[String], +) -> String { + symbolic_to_algebraic::<_, BabyBearField>(expr, columns).to_string() +} diff --git a/pipeline/build.rs b/pipeline/build.rs index 7d9625c2e..1c735a18a 100644 --- a/pipeline/build.rs +++ b/pipeline/build.rs @@ -49,10 +49,11 @@ fn build_tests(kind: &str, dir: &str, sub_dir: &str, name: &str) { .strip_suffix(&format!(".{kind}")) { println!("cargo:rerun-if-changed={full_dir}/{relative_name}"); - let ignore = SLOW_LIST - .contains(&test_name) - .then_some("#[ignore = \"Too slow\"]") - .unwrap_or_default(); + let ignore = if SLOW_LIST.contains(&test_name) { + "#[ignore = \"Too slow\"]" + } else { + Default::default() + }; write!( test_file, r#" diff --git a/pipeline/src/test_util.rs b/pipeline/src/test_util.rs index 8e3c4b2f5..7c1c2a7f7 100644 --- a/pipeline/src/test_util.rs +++ b/pipeline/src/test_util.rs @@ -169,8 +169,8 @@ pub fn gen_estark_proof_with_backend_variant( let publics: Vec = pipeline .publics() - .iter() - .map(|(_name, v)| v.expect("all publics should be known since we created a proof")) + .values() + .map(|v| v.expect("all publics should be known since we created a proof")) .collect(); pipeline.verify(&proof, &[publics]).unwrap(); @@ -270,8 +270,8 @@ pub fn gen_halo2_proof(pipeline: Pipeline, backend: BackendVariant) let publics: Vec = pipeline .publics() - .iter() - .map(|(_name, v)| v.expect("all publics should be known since we created a proof")) + .values() + .map(|v| v.expect("all publics should be known since we created a proof")) .collect(); pipeline.verify(&proof, &[publics]).unwrap(); @@ -304,8 +304,8 @@ pub fn test_plonky3_with_backend_variant( let publics: Vec = pipeline .publics() - .iter() - .map(|(_name, v)| v.expect("all publics should be known since we created a proof")) + .values() + .map(|v| v.expect("all publics should be known since we created a proof")) .collect(); pipeline.verify(&proof, &[publics.clone()]).unwrap(); @@ -351,8 +351,8 @@ pub fn test_plonky3_pipeline(pipeline: Pipeline) { let publics: Vec = pipeline .publics() - .iter() - .map(|(_name, v)| v.expect("all publics should be known since we created a proof")) + .values() + .map(|v| v.expect("all publics should be known since we created a proof")) .collect(); pipeline.verify(&proof, &[publics.clone()]).unwrap(); diff --git a/pipeline/tests/pil.rs b/pipeline/tests/pil.rs index 3264ca488..fe0ed638c 100644 --- 
a/pipeline/tests/pil.rs +++ b/pipeline/tests/pil.rs @@ -263,6 +263,7 @@ fn stwo_fixed_columns() { } #[test] +#[should_panic = "The composition polynomial OODS value does not match the trace OODS values"] fn stwo_stage1_publics() { let f = "pil/stage1_publics.pil"; test_stwo_stage1_public( diff --git a/powdr-test/examples/fibonacci/rust-toolchain.toml b/powdr-test/examples/fibonacci/rust-toolchain.toml index 690b698f9..7ee67e848 100644 --- a/powdr-test/examples/fibonacci/rust-toolchain.toml +++ b/powdr-test/examples/fibonacci/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2024-12-17" +channel = "nightly-2025-05-14" diff --git a/powdr-test/examples/serialized-inputs/rust-toolchain.toml b/powdr-test/examples/serialized-inputs/rust-toolchain.toml index 690b698f9..7ee67e848 100644 --- a/powdr-test/examples/serialized-inputs/rust-toolchain.toml +++ b/powdr-test/examples/serialized-inputs/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2024-12-17" +channel = "nightly-2025-05-14" diff --git a/powdr/src/lib.rs b/powdr/src/lib.rs index 2a69b6760..6ec06b3bc 100644 --- a/powdr/src/lib.rs +++ b/powdr/src/lib.rs @@ -254,8 +254,8 @@ impl Session { let pubs: Vec = self .pipeline .publics() - .iter() - .map(|(_, v)| v.unwrap().to_integer().try_into_u32().unwrap()) + .values() + .map(|v| v.unwrap().to_integer().try_into_u32().unwrap()) .collect(); pubs.try_into().expect("There should be exactly 8 publics") } diff --git a/riscv-executor/src/submachines.rs b/riscv-executor/src/submachines.rs index 1b0288e31..da0ce7e9e 100644 --- a/riscv-executor/src/submachines.rs +++ b/riscv-executor/src/submachines.rs @@ -264,7 +264,7 @@ impl SubmachineTrace { fn push_row(&mut self) { self.selectors.values_mut().for_each(|v| v.push(0.into())); self.values - .extend(std::iter::repeat(F::from(0)).take(self.cols.len())); + .extend(std::iter::repeat_n(F::from(0), self.cols.len())); } /// Push a dummy block to the trace. diff --git a/riscv-runtime/rust-toolchain.toml b/riscv-runtime/rust-toolchain.toml index d9ae6e23f..8ebb44f63 100644 --- a/riscv-runtime/rust-toolchain.toml +++ b/riscv-runtime/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf", "riscv32im-risc0-zkvm-elf"] profile = "minimal" diff --git a/riscv-runtime/src/lib.rs b/riscv-runtime/src/lib.rs index db359ba83..fafba522a 100644 --- a/riscv-runtime/src/lib.rs +++ b/riscv-runtime/src/lib.rs @@ -1,6 +1,5 @@ #![no_std] #![feature( - start, alloc_error_handler, maybe_uninit_write_slice, round_char_boundary, diff --git a/riscv/src/continuations.rs b/riscv/src/continuations.rs index c47924823..44ff6ddb5 100644 --- a/riscv/src/continuations.rs +++ b/riscv/src/continuations.rs @@ -48,7 +48,7 @@ fn render_memory_hash(hash: &[F]) -> String { /// # Arguments /// - `pipeline`: The pipeline that should be the starting point for all the chunks. /// - `pipeline_callback`: A function that will be called for each chunk. It will be passed a prepared `pipeline`, -/// with all chunk-specific information set (witness, fixed cols, inputs, optimized pil) +/// with all chunk-specific information set (witness, fixed cols, inputs, optimized pil) /// - `bootloader_inputs`: The inputs to the bootloader and the index of the row at which the shutdown routine /// is supposed to execute, for each chunk, as returned by `rust_continuations_dry_run`. 
pub fn rust_continuations( diff --git a/riscv/src/lib.rs b/riscv/src/lib.rs index 75f4bfa6f..3bc5db7e3 100644 --- a/riscv/src/lib.rs +++ b/riscv/src/lib.rs @@ -367,7 +367,7 @@ fn build_cargo_command( let mut args: Vec<&OsStr> = as_ref![ OsStr; - "+nightly-2024-08-01", + "+nightly-2025-05-14", "build", "--release", "--target-dir", diff --git a/riscv/tests/riscv_data/affine_256/rust-toolchain.toml b/riscv/tests/riscv_data/affine_256/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/affine_256/rust-toolchain.toml +++ b/riscv/tests/riscv_data/affine_256/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/byte_access/rust-toolchain.toml b/riscv/tests/riscv_data/byte_access/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/byte_access/rust-toolchain.toml +++ b/riscv/tests/riscv_data/byte_access/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/double_word/rust-toolchain.toml b/riscv/tests/riscv_data/double_word/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/double_word/rust-toolchain.toml +++ b/riscv/tests/riscv_data/double_word/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/ec_add/rust-toolchain.toml b/riscv/tests/riscv_data/ec_add/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/ec_add/rust-toolchain.toml +++ b/riscv/tests/riscv_data/ec_add/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/ec_double/rust-toolchain.toml b/riscv/tests/riscv_data/ec_double/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/ec_double/rust-toolchain.toml +++ b/riscv/tests/riscv_data/ec_double/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/evm/rust-toolchain.toml b/riscv/tests/riscv_data/evm/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/evm/rust-toolchain.toml +++ b/riscv/tests/riscv_data/evm/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/features/rust-toolchain.toml b/riscv/tests/riscv_data/features/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/features/rust-toolchain.toml +++ b/riscv/tests/riscv_data/features/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/function_pointer/rust-toolchain.toml b/riscv/tests/riscv_data/function_pointer/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/function_pointer/rust-toolchain.toml +++ 
b/riscv/tests/riscv_data/function_pointer/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/goldilocks_inverse/rust-toolchain.toml b/riscv/tests/riscv_data/goldilocks_inverse/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/goldilocks_inverse/rust-toolchain.toml +++ b/riscv/tests/riscv_data/goldilocks_inverse/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/halt/rust-toolchain.toml b/riscv/tests/riscv_data/halt/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/halt/rust-toolchain.toml +++ b/riscv/tests/riscv_data/halt/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/many_chunks/rust-toolchain.toml b/riscv/tests/riscv_data/many_chunks/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/many_chunks/rust-toolchain.toml +++ b/riscv/tests/riscv_data/many_chunks/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/many_chunks_memory/rust-toolchain.toml b/riscv/tests/riscv_data/many_chunks_memory/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/many_chunks_memory/rust-toolchain.toml +++ b/riscv/tests/riscv_data/many_chunks_memory/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/modmul_256/rust-toolchain.toml b/riscv/tests/riscv_data/modmul_256/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/modmul_256/rust-toolchain.toml +++ b/riscv/tests/riscv_data/modmul_256/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/output/rust-toolchain.toml b/riscv/tests/riscv_data/output/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/output/rust-toolchain.toml +++ b/riscv/tests/riscv_data/output/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/plonky3_verify/rust-toolchain.toml b/riscv/tests/riscv_data/plonky3_verify/rust-toolchain.toml index 67c0b22b7..ed08ac0c7 100644 --- a/riscv/tests/riscv_data/plonky3_verify/rust-toolchain.toml +++ b/riscv/tests/riscv_data/plonky3_verify/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32im-risc0-zkvm-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/poseidon2_gl_via_coprocessor/rust-toolchain.toml b/riscv/tests/riscv_data/poseidon2_gl_via_coprocessor/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/poseidon2_gl_via_coprocessor/rust-toolchain.toml 
+++ b/riscv/tests/riscv_data/poseidon2_gl_via_coprocessor/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/poseidon_gl_via_coprocessor/rust-toolchain.toml b/riscv/tests/riscv_data/poseidon_gl_via_coprocessor/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/poseidon_gl_via_coprocessor/rust-toolchain.toml +++ b/riscv/tests/riscv_data/poseidon_gl_via_coprocessor/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/print/rust-toolchain.toml b/riscv/tests/riscv_data/print/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/print/rust-toolchain.toml +++ b/riscv/tests/riscv_data/print/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/read_slice/rust-toolchain.toml b/riscv/tests/riscv_data/read_slice/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/read_slice/rust-toolchain.toml +++ b/riscv/tests/riscv_data/read_slice/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/std_hello_world/rust-toolchain.toml b/riscv/tests/riscv_data/std_hello_world/rust-toolchain.toml index 67c0b22b7..ed08ac0c7 100644 --- a/riscv/tests/riscv_data/std_hello_world/rust-toolchain.toml +++ b/riscv/tests/riscv_data/std_hello_world/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32im-risc0-zkvm-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/sum/rust-toolchain.toml b/riscv/tests/riscv_data/sum/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/sum/rust-toolchain.toml +++ b/riscv/tests/riscv_data/sum/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/sum_serde/rust-toolchain.toml b/riscv/tests/riscv_data/sum_serde/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/sum_serde/rust-toolchain.toml +++ b/riscv/tests/riscv_data/sum_serde/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/sum_serde_in_mem/rust-toolchain.toml b/riscv/tests/riscv_data/sum_serde_in_mem/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/sum_serde_in_mem/rust-toolchain.toml +++ b/riscv/tests/riscv_data/sum_serde_in_mem/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/trivial/rust-toolchain.toml b/riscv/tests/riscv_data/trivial/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/trivial/rust-toolchain.toml +++ 
b/riscv/tests/riscv_data/trivial/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/two_sums_serde/rust-toolchain.toml b/riscv/tests/riscv_data/two_sums_serde/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/two_sums_serde/rust-toolchain.toml +++ b/riscv/tests/riscv_data/two_sums_serde/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/vec_median/rust-toolchain.toml b/riscv/tests/riscv_data/vec_median/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/vec_median/rust-toolchain.toml +++ b/riscv/tests/riscv_data/vec_median/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/riscv/tests/riscv_data/zero_with_values/rust-toolchain.toml b/riscv/tests/riscv_data/zero_with_values/rust-toolchain.toml index 8583bb317..321edd70d 100644 --- a/riscv/tests/riscv_data/zero_with_values/rust-toolchain.toml +++ b/riscv/tests/riscv_data/zero_with_values/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "nightly-2024-08-01" +channel = "nightly-2025-05-14" targets = ["riscv32imac-unknown-none-elf"] profile = "minimal" diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 690b698f9..7ee67e848 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "nightly-2024-12-17" +channel = "nightly-2025-05-14"
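The doc comment on `Gate` in the new `openvm/src/plonk/mod.rs` above states the gate relation q_l * a + q_r * b + q_o * c + q_mul * a * b + q_const = 0. A minimal standalone sketch of that relation, with plain i64 values standing in for the crate's field and variable types (an assumption made only for illustration): a multiplication constraint x * y = z becomes a gate with q_mul = 1 and q_o = -1.

// Illustrative only: i64 replaces the field type `T`; the real circuit resolves
// `Variable`s against the APC witness instead of taking concrete values directly.
struct Gate {
    q_l: i64,
    q_r: i64,
    q_o: i64,
    q_mul: i64,
    q_const: i64,
}

impl Gate {
    // Evaluate q_l*a + q_r*b + q_o*c + q_mul*a*b + q_const for concrete a, b, c.
    fn eval(&self, a: i64, b: i64, c: i64) -> i64 {
        self.q_l * a + self.q_r * b + self.q_o * c + self.q_mul * a * b + self.q_const
    }
}

fn demo_mul_gate() {
    // Encode the constraint x * y = z, i.e. 1*(a*b) + (-1)*c = 0.
    let mul_gate = Gate { q_l: 0, q_r: 0, q_o: -1, q_mul: 1, q_const: 0 };
    assert_eq!(mul_gate.eval(3, 5, 15), 0); // satisfied
    assert_ne!(mul_gate.eval(3, 5, 14), 0); // violated
}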
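`SharedChips::apply` in `openvm/src/powdr_extension/chip.rs` interprets the bitwise-lookup bus arguments as `[x, y, x_xor_y, selector]`, where selector 0 requests a range check of x and y and selector 1 requests an XOR whose result must equal x_xor_y, repeated `mult` times. A toy dispatcher over the same argument layout, with plain counters replacing the real shared lookup chips (an assumption for illustration):

#[derive(Default, Debug, PartialEq)]
struct BitwiseCounters {
    range_requests: u32,
    xor_requests: u32,
}

// Mirror the [x, y, x_xor_y, selector] interpretation, applied `mult` times.
fn apply_bitwise(counters: &mut BitwiseCounters, mult: u32, args: &[u32]) {
    let [x, y, x_xor_y, selector]: [u32; 4] = args.try_into().unwrap();
    for _ in 0..mult {
        match selector {
            0 => counters.range_requests += 1,
            1 => {
                assert_eq!(x ^ y, x_xor_y); // the replayed XOR result must match
                counters.xor_requests += 1;
            }
            _ => unreachable!("Invalid selector"),
        }
    }
}

fn demo_apply_bitwise() {
    let mut c = BitwiseCounters::default();
    apply_bitwise(&mut c, 2, &[0b1010, 0b0110, 0b1100, 1]);
    apply_bitwise(&mut c, 1, &[7, 255, 0, 0]);
    assert_eq!(c, BitwiseCounters { range_requests: 1, xor_requests: 2 });
}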
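`global_index` in the same file maps a column index in an instruction's original AIR to its position in the autoprecompile AIR: `subs[local_index]` gives the autoprecompile poly_id, which is then looked up in the poly_id-to-column-index map. A standalone sketch with bare integer IDs, where a plain slice and BTreeMap stand in for `OriginalInstruction::subs` and `PowdrAir::column_index_by_poly_id` (both stand-ins are assumptions for illustration):

use std::collections::BTreeMap;

// Hypothetical stand-ins: `subs` plays the role of OriginalInstruction::subs,
// `index_by_poly_id` the role of the autoprecompile's column_index_by_poly_id.
fn global_index(
    local_index: usize,
    subs: &[u64],
    index_by_poly_id: &BTreeMap<u64, usize>,
) -> Option<usize> {
    // 1. local column index in the original AIR -> poly_id in the autoprecompile
    let apc_poly_id = subs.get(local_index)?;
    // 2. poly_id in the autoprecompile -> column index in the autoprecompile trace
    index_by_poly_id.get(apc_poly_id).copied()
}

fn demo_global_index() {
    // The instruction's columns 0..3 were substituted by autoprecompile poly_ids 7, 9, 4.
    let subs = vec![7u64, 9, 4];
    // The autoprecompile lays out its columns as poly_id -> index.
    let index_by_poly_id: BTreeMap<u64, usize> = [(4, 0), (7, 1), (9, 2)].into_iter().collect();

    assert_eq!(global_index(0, &subs, &index_by_poly_id), Some(1));
    assert_eq!(global_index(2, &subs, &index_by_poly_id), Some(0));
    assert_eq!(global_index(3, &subs, &index_by_poly_id), None); // column not substituted
}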
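`algebraic_to_symbolic` in `openvm/src/utils.rs` lowers `Pow` by unrolling it into repeated `Mul` nodes, and only supports constant exponents. The same unrolling on a toy expression type, with u64 arithmetic standing in for the field (an assumption for illustration):

#[derive(Clone, Debug, PartialEq)]
enum Expr {
    Const(u64),
    Var, // a single variable is enough for the sketch
    Mul(Box<Expr>, Box<Expr>),
}

// Unroll `base ^ exp` into Const(1) (for exp == 0) or exp - 1 nested multiplications,
// mirroring the Pow branch of algebraic_to_symbolic.
fn unroll_pow(base: Expr, exp: u64) -> Expr {
    if exp == 0 {
        return Expr::Const(1);
    }
    let mut result = base.clone();
    for _ in 1..exp {
        result = Expr::Mul(Box::new(result), Box::new(base.clone()));
    }
    result
}

fn eval(e: &Expr, x: u64) -> u64 {
    match e {
        Expr::Const(c) => *c,
        Expr::Var => x,
        Expr::Mul(a, b) => eval(a, x) * eval(b, x),
    }
}

fn demo_unroll_pow() {
    let e = unroll_pow(Expr::Var, 3); // x^3 -> (x * x) * x
    assert_eq!(eval(&e, 5), 125);
    assert_eq!(unroll_pow(Expr::Var, 0), Expr::Const(1));
}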