Merge pull request #124 from chriseth/block_machines

Block machines.
This commit is contained in:
chriseth
2023-04-11 21:24:15 +02:00
committed by GitHub
7 changed files with 670 additions and 17 deletions

View File

@@ -119,7 +119,6 @@ pub fn determine_global_constraints<'a>(
// It allows us to completely remove some lookups.
let mut full_span = BTreeSet::new();
for (&name, &values) in &fixed_data.fixed_cols {
println!("constr for {name}");
if let Some((cons, full)) = process_fixed_column(values) {
assert!(known_constraints.insert(name, cons).is_none());
if full {
@@ -128,12 +127,6 @@ pub fn determine_global_constraints<'a>(
}
}
//if fixed_data.verbose {
println!("Determined the following bit constraints on fixed columns:");
for (name, con) in &known_constraints {
println!(" {name}: {con}");
}
let mut retained_identities = vec![];
let mut removed_identities = vec![];
for identity in identities {
@@ -149,7 +142,11 @@ pub fn determine_global_constraints<'a>(
}
if fixed_data.verbose {
println!("Determined the following identities to be bit/range constraints:");
println!("Determined the following global bit constraints:");
for (name, con) in &known_constraints {
println!(" {name}: {con}");
}
println!("Determined the following identities to be purely bit/range constraints:");
for id in removed_identities {
println!(" {id}");
}

View File

@@ -0,0 +1,566 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use itertools::Itertools;
use super::{EvalResult, FixedData, FixedLookup};
use crate::analyzer::{Expression, Identity, IdentityKind, SelectedExpressions};
use crate::number::{AbstractNumberType, DegreeType};
use crate::witness_generator::eval_error;
use crate::witness_generator::{
affine_expression::AffineExpression,
bit_constraints::{BitConstraint, BitConstraintSet},
eval_error::EvalError,
expression_evaluator::ExpressionEvaluator,
machines::Machine,
symbolic_witness_evaluator::{SymoblicWitnessEvaluator, WitnessColumnEvaluator},
util::{is_simple_poly, WitnessColumnNamer},
Constraint,
};
/// A machine that produces multiple rows (one block) per query.
/// TODO we do not actually "detect" the machine yet, we just check if
/// the lookup has a binary selector that is 1 every k rows for some k > 1
pub struct BlockMachine {
    /// Block size, the period of the selector.
    block_size: usize,
    /// Name of the fixed column that acts as the lookup selector.
    selector: String,
    /// The identities internal to this machine.
    identities: Vec<Identity>,
    /// One column of values for each witness.
    /// `None` means the cell has not been determined yet.
    data: HashMap<usize, Vec<Option<AbstractNumberType>>>,
    /// Current row in the machine
    row: DegreeType,
    /// Bit constraints, are deleted outside the current block.
    bit_constraints: HashMap<usize, HashMap<DegreeType, BitConstraint>>,
    /// Global bit constraints on witness columns.
    global_bit_constraints: HashMap<usize, BitConstraint>,
    /// Number of witnesses (in general, not in this machine).
    witness_count: usize,
    /// Poly degree / absolute number of rows
    degree: DegreeType,
    /// Cache that states the order in which to evaluate identities
    /// to make progress most quickly.
    processing_sequence_cache: ProcessingSequenceCache,
}
impl BlockMachine {
    /// Tries to instantiate a block machine from the given identities.
    /// Succeeds if one of the connecting lookups has a right-hand-side
    /// selector that is a boolean fixed column with period > 1.
    pub fn try_new(
        fixed_data: &FixedData,
        connecting_identities: &[&Identity],
        identities: &[&Identity],
        witness_names: &HashSet<&str>,
        global_bit_constraints: &BTreeMap<&str, BitConstraint>,
    ) -> Option<Box<Self>> {
        for id in connecting_identities {
            let sel = match &id.right.selector {
                Some(sel) => sel,
                None => continue,
            };
            // TODO we should check that the other constraints/fixed columns are also periodic.
            let (selector, period) = match is_boolean_periodic_selector(sel, fixed_data) {
                Some(found) => found,
                None => continue,
            };
            // Empty column storage for each witness of this machine.
            let data = witness_names
                .iter()
                .map(|name| (fixed_data.witness_ids[name], vec![]))
                .collect();
            // Re-key the global bit constraints by witness ID, dropping
            // entries that are not witness columns.
            let global_bit_constraints = global_bit_constraints
                .iter()
                .filter_map(|(name, constraint)| {
                    fixed_data
                        .witness_ids
                        .get(name)
                        .map(|id| (*id, constraint.clone()))
                })
                .collect();
            let mut machine = BlockMachine {
                block_size: period,
                selector,
                identities: identities.iter().map(|&i| i.clone()).collect(),
                data,
                row: 0,
                bit_constraints: Default::default(),
                global_bit_constraints,
                witness_count: fixed_data.witness_cols.len(),
                degree: fixed_data.degree,
                processing_sequence_cache: ProcessingSequenceCache::new(period, identities.len()),
            };
            // Append a block so that we do not have to deal with wrap-around
            // when storing machine witness data.
            machine.append_new_block(fixed_data.degree).unwrap();
            return Some(Box::new(machine));
        }
        None
    }
}
/// Check if `expr` is a reference to a function of the form
/// f(i) { if (i + 1) % k == 0 { 1 } else { 0 } }
/// for some k >= 2
/// TODO we could make this more generic and only detect the period
/// but not enforce the offset.
fn is_boolean_periodic_selector(
    expr: &Expression,
    fixed_data: &FixedData,
) -> Option<(String, usize)> {
    let poly = is_simple_poly(expr)?;
    let values = fixed_data.fixed_cols.get(poly)?;
    // The candidate period is one more than the index of the first 1.
    let period = 1 + values.iter().position(|v| *v == 1.into())?;
    if period == 1 {
        return None;
    }
    // Verify the column is 1 exactly on the last row of every period
    // and 0 everywhere else.
    for (i, v) in values.iter().enumerate() {
        let expected = if (i + 1) % period == 0 {
            1.into()
        } else {
            0.into()
        };
        if *v != expected {
            return None;
        }
    }
    Some((poly.to_string(), period))
}
impl Machine for BlockMachine {
    /// Handles a plookup query directed at this machine.
    /// Returns `None` if the query does not concern this machine (the RHS
    /// selector is not our selector column or the identity is not a plookup).
    /// On an internal error, all witness data written during the attempt is
    /// rolled back before the error is returned.
    fn process_plookup(
        &mut self,
        fixed_data: &FixedData,
        fixed_lookup: &mut FixedLookup,
        kind: IdentityKind,
        left: &[Result<AffineExpression, EvalError>],
        right: &SelectedExpressions,
    ) -> Option<EvalResult> {
        if is_simple_poly(right.selector.as_ref()?)? != self.selector
            || kind != IdentityKind::Plookup
        {
            return None;
        }
        // Remember the length so we can roll back on failure.
        let previous_len = self.rows() as usize;
        Some({
            let result = self.process_plookup_internal(fixed_data, fixed_lookup, left, right);
            // Block-local bit constraints only live for one query.
            self.bit_constraints.clear();
            result.map_err(|e| {
                // rollback the changes.
                for col in self.data.values_mut() {
                    col.truncate(previous_len)
                }
                e
            })
        })
    }

    /// Takes the accumulated witness data out of the machine and returns it
    /// as named full-length columns: unknown cells become zero and every
    /// column is resized (padded or truncated) to the polynomial degree.
    fn witness_col_values(
        &mut self,
        fixed_data: &FixedData,
    ) -> HashMap<String, Vec<AbstractNumberType>> {
        std::mem::take(&mut self.data)
            .into_iter()
            .map(|(id, values)| {
                let mut values = values
                    .into_iter()
                    .map(|v| v.unwrap_or_default())
                    .collect::<Vec<_>>();
                values.resize(fixed_data.degree as usize, 0.into());
                (fixed_data.name(id), values)
            })
            .collect()
    }
}
impl BlockMachine {
/// Extends the data with a new block.
fn append_new_block(&mut self, max_len: DegreeType) -> Result<(), EvalError> {
if self.rows() + self.block_size as DegreeType >= max_len {
return Err("Rows in block machine exhausted.".to_string().into());
}
for col in self.data.values_mut() {
col.resize_with(col.len() + self.block_size, || None);
}
Ok(())
}
fn rows(&self) -> DegreeType {
self.data.values().next().unwrap().len() as DegreeType
}
fn process_plookup_internal(
&mut self,
fixed_data: &FixedData,
fixed_lookup: &mut FixedLookup,
left: &[Result<AffineExpression, EvalError>],
right: &SelectedExpressions,
) -> EvalResult {
// First check if we already store the value.
if left
.iter()
.all(|v| v.as_ref().ok().map(|v| v.is_constant()) == Some(true))
{
return Ok(vec![]);
// TOOD check that they really exist (maybe just check the last row)
}
let old_len = self.rows();
self.append_new_block(fixed_data.degree)?;
let mut outer_assignments = vec![];
// TODO this assumes we are always using the same lookup for this machine.
let sequence = self.processing_sequence_cache.get_processing_sequence(left);
let mut errors = vec![];
// TODO The error handling currently does not handle contradictions properly.
// If we can find an assignment of all LHS variables at the end, we do not return an error,
// even if there is a conflict.
// Record the steps where we made progress, so we can report this to the
// cache later on.
let mut progress_steps = vec![];
for step in sequence {
let SequenceStep {
row_delta,
identity,
} = step;
self.row =
(old_len as i64 + row_delta + fixed_data.degree as i64) as u64 % fixed_data.degree;
match self.process_identity(fixed_data, fixed_lookup, left, right, identity) {
Ok(result) => {
if !result.is_empty() {
progress_steps.push(step);
errors.clear();
outer_assignments.extend(self.handle_eval_result(result))
}
}
Err(e) => errors.push(format!("In row {}: {e}", self.row).into()),
}
}
// Only succeed if we can assign everything.
// Otherwise it is messy because we have to find the correct block again.
let unknown_variables = left
.iter()
.filter_map(|l| l.as_ref().ok().map(|l| l.nonzero_variables()))
.concat()
.iter()
.cloned()
.collect::<HashSet<_>>();
let value_assignments = outer_assignments
.iter()
.filter_map(|(var, con)| match con {
Constraint::Assignment(_) => Some(*var),
Constraint::BitConstraint(_) => None,
})
.collect::<HashSet<_>>();
if unknown_variables.is_subset(&value_assignments) {
// We solved the query, so report it to the cache.
self.processing_sequence_cache
.report_processing_sequence(left, progress_steps);
Ok(outer_assignments)
} else if !errors.is_empty() {
Err(errors.into_iter().reduce(eval_error::combine).unwrap())
} else {
Err("Could not assign all variables in the query - maybe the machine does not have enough constraints?".to_string().into())
}
}
fn handle_eval_result(&mut self, result: Vec<(usize, Constraint)>) -> Vec<(usize, Constraint)> {
result
.into_iter()
.filter_map(|(poly, constraint)| {
let (poly, next) = self.extract_next(poly);
let r = (self.row + next as u64) % self.degree;
let is_outside_poly = !self.data.contains_key(&poly);
if is_outside_poly {
assert!(!next);
Some((poly, constraint))
} else {
match constraint {
Constraint::Assignment(a) => {
let values = self.data.get_mut(&poly).unwrap();
if r as usize <= values.len() {
// do not write to other rows for now
values[r as usize] = Some(a);
}
}
Constraint::BitConstraint(bc) => {
self.bit_constraints.entry(poly).or_default().insert(r, bc);
}
}
None
}
})
.collect()
}
/// Processes an identity which is either the query or
/// an identity in the vector of identities.
fn process_identity(
&self,
fixed_data: &FixedData,
fixed_lookup: &mut FixedLookup,
left: &[Result<AffineExpression, EvalError>],
right: &SelectedExpressions,
identity: IdentityInSequence,
) -> EvalResult {
match identity {
IdentityInSequence::Internal(index) => {
let id = &self.identities[index];
match id.kind {
IdentityKind::Polynomial => self.process_polynomial_identity(
fixed_data,
id.left.selector.as_ref().unwrap(),
),
IdentityKind::Plookup | IdentityKind::Permutation => {
self.process_plookup(fixed_data, fixed_lookup, id)
}
_ => Err("Unsupported lookup type".to_string().into()),
}
}
IdentityInSequence::OuterQuery => self.process_outer_query(fixed_data, left, right),
}
}
/// Processes the outer query / the plookup. This function should only be called
/// on the acutal query row (the last one of the block).
fn process_outer_query(
&self,
fixed_data: &FixedData,
left: &[Result<AffineExpression, EvalError>],
right: &SelectedExpressions,
) -> EvalResult {
assert!(self.row as usize % self.block_size == self.block_size - 1);
let mut errors = vec![];
let mut results = vec![];
// TODO how to properly hanlde the errors here?
// We only return them if we do not make any progress.
for (l, r) in left.iter().zip(right.expressions.iter()) {
match (l, self.evaluate(fixed_data, r)) {
(Ok(l), Ok(r)) => match (l.clone() - r).solve_with_bit_constraints(self) {
Ok(result) => results.extend(result),
Err(e) => errors.push(e),
},
(Err(e), Ok(_)) => errors.push(e.clone()),
(Ok(_), Err(e)) => errors.push(e),
(Err(e1), Err(e2)) => errors.extend([e1.clone(), e2]),
}
}
if results.is_empty() && !errors.is_empty() {
Err(errors.into_iter().reduce(eval_error::combine).unwrap())
} else {
Ok(results)
}
}
/// Process a polynomial identity internal no the machine.
fn process_polynomial_identity(
&self,
fixed_data: &FixedData,
identity: &Expression,
) -> EvalResult {
let evaluated = self.evaluate(fixed_data, identity)?;
evaluated.solve_with_bit_constraints(self).map_err(|e| {
let witness_data = WitnessData {
fixed_data,
data: &self.data,
row: self.row,
};
let formatted = evaluated.format(&witness_data);
if let EvalError::ConstraintUnsatisfiable(_) = e {
EvalError::ConstraintUnsatisfiable(format!(
"Constraint is invalid ({formatted} != 0)."
))
} else {
format!("Could not solve expression {formatted} = 0: {e}").into()
}
})
}
/// Process a plookup internal to the machine against a set of fixed columns.
fn process_plookup(
&self,
fixed_data: &FixedData,
fixed_lookup: &mut FixedLookup,
identity: &Identity,
) -> EvalResult {
if identity.left.selector.is_some() || identity.right.selector.is_some() {
unimplemented!("Selectors not yet implemented.");
}
let left = identity
.left
.expressions
.iter()
.map(|e| self.evaluate(fixed_data, e))
.collect::<Vec<_>>();
if let Some(result) =
fixed_lookup.process_plookup(fixed_data, identity.kind, &left, &identity.right)
{
result
} else {
Err("Could not find a matching machine for the lookup."
.to_string()
.into())
}
}
fn evaluate(
&self,
fixed_data: &FixedData,
expression: &Expression,
) -> Result<AffineExpression, EvalError> {
ExpressionEvaluator::new(SymoblicWitnessEvaluator::new(
fixed_data,
self.row,
WitnessData {
fixed_data,
data: &self.data,
row: self.row,
},
))
.evaluate(expression)
}
/// Converts a poly ID that might contain a next offset
/// to a regular poly ID plus a boolean signifying "next".
fn extract_next(&self, id: usize) -> (usize, bool) {
extract_next(self.witness_count, id)
}
}
impl BitConstraintSet for BlockMachine {
    /// Looks up the bit constraint for a (possibly "next"-offset) poly ID.
    /// A global constraint on the column wins; otherwise the block-local
    /// constraint recorded for the referenced row is used, if any.
    fn bit_constraint(&self, id: usize) -> Option<BitConstraint> {
        let (poly, next) = self.extract_next(id);
        if let Some(global) = self.global_bit_constraints.get(&poly) {
            return Some(global.clone());
        }
        let row = (self.row + next as u64) % self.degree;
        self.bit_constraints.get(&poly)?.get(&row).cloned()
    }
}
/// Read-only view of the machine's witness data, used for symbolic
/// evaluation and for naming/formatting in error messages.
#[derive(Clone)]
struct WitnessData<'a> {
    pub fixed_data: &'a FixedData<'a>,
    /// The machine's witness columns; `None` cells are still undetermined.
    pub data: &'a HashMap<usize, Vec<Option<AbstractNumberType>>>,
    /// The row currently being evaluated.
    pub row: DegreeType,
}
impl<'a> WitnessColumnEvaluator for WitnessData<'a> {
    /// Returns the value of the witness column `name` at the current row
    /// (or the following row if `next` is set, wrapping at the degree).
    /// Known cells become constants; unknown or out-of-range cells become
    /// symbolic variables, where a "next" reference is encoded by adding
    /// the witness count to the column ID (see `extract_next`).
    fn value(&self, name: &str, next: bool) -> Result<AffineExpression, EvalError> {
        let id = self.fixed_data.witness_ids[name];
        let row = if next {
            (self.row + 1) % self.fixed_data.degree
        } else {
            self.row
        };
        // It is not an error to access the rows outside the block.
        // We just return a symbolic ID for those.
        match self.data[&id].get(row as usize).cloned().flatten() {
            Some(value) => Ok(value.into()),
            None => {
                let witness_count = self.fixed_data.witness_cols.len();
                let symbolic_id = if next { id + witness_count } else { id };
                Ok(AffineExpression::from_witness_poly_value(symbolic_id))
            }
        }
    }
}
impl<'a> WitnessColumnNamer for WitnessData<'a> {
    /// Maps a (possibly "next"-offset) poly ID back to a column name,
    /// appending a prime for next-row references.
    fn name(&self, i: usize) -> String {
        let (id, next) = extract_next(self.fixed_data.witness_cols.len(), i);
        let suffix = if next { "\'" } else { "" };
        format!("{}{suffix}", self.fixed_data.name(id))
    }
}
/// Converts a poly ID that might contain a next offset
/// to a regular poly ID plus a boolean signifying "next".
fn extract_next(witness_count: usize, id: usize) -> (usize, bool) {
    // IDs in `witness_count..2 * witness_count` denote the same columns
    // shifted to the next row.
    match id.checked_sub(witness_count) {
        Some(shifted) => (shifted, true),
        None => (id, false),
    }
}
/// Caches, per pattern of known query values, the order in which to
/// evaluate identities so repeated similar lookups make progress quickly.
struct ProcessingSequenceCache {
    /// Block size of the owning machine.
    block_size: usize,
    /// Number of internal identities of the owning machine.
    identities_count: usize,
    /// Maps the pattern of known LHS values to the steps that made progress.
    cache: BTreeMap<SequenceCacheKey, Vec<SequenceStep>>,
}
/// One step of a processing sequence: which identity to evaluate at which
/// row offset relative to the start of the current block.
#[derive(Clone)]
struct SequenceStep {
    /// Row offset relative to the first row of the block (can be -1).
    row_delta: i64,
    /// The identity to process at that row.
    identity: IdentityInSequence,
}
/// Identifies what to evaluate in a sequence step: one of the machine's
/// internal identities, or the outer lookup query itself.
#[derive(Clone, Copy)]
enum IdentityInSequence {
    /// Index into `BlockMachine::identities`.
    Internal(usize),
    /// The connecting plookup that triggered processing of this block.
    OuterQuery,
}
/// Cache key: for each expression on the left-hand side of the query,
/// whether its value is already known (constant) when the query arrives.
#[derive(PartialOrd, Ord, PartialEq, Eq, Debug)]
struct SequenceCacheKey {
    // One flag per LHS expression, in order.
    known_columns: Vec<bool>,
}
impl From<&[Result<AffineExpression, EvalError>]> for SequenceCacheKey {
    /// Derives the cache key from the query's LHS: an expression counts as
    /// "known" exactly when it evaluated successfully to a constant.
    fn from(value: &[Result<AffineExpression, EvalError>]) -> Self {
        let known_columns = value
            .iter()
            .map(|v| matches!(v, Ok(expr) if expr.is_constant()))
            .collect();
        SequenceCacheKey { known_columns }
    }
}
impl ProcessingSequenceCache {
    /// Creates an empty cache for a machine with the given block size and
    /// number of internal identities.
    pub fn new(block_size: usize, identities_count: usize) -> Self {
        ProcessingSequenceCache {
            block_size,
            identities_count,
            cache: Default::default(),
        }
    }

    /// Returns the cached sequence for this pattern of known values, or the
    /// default sequence: sweep the rows forward, backward and forward again
    /// (each sweep also covering one row before and after the block),
    /// trying every internal identity on each row and additionally the
    /// outer query on the last row of the block.
    pub fn get_processing_sequence(
        &self,
        left: &[Result<AffineExpression, EvalError>],
    ) -> Vec<SequenceStep> {
        self.cache.get(&left.into()).cloned().unwrap_or_else(|| {
            let block_size = self.block_size as i64;
            (-1..=block_size)
                .chain((-1..block_size).rev())
                .chain(-1..=block_size)
                .flat_map(|row_delta| {
                    let mut identities = (0..self.identities_count)
                        .map(IdentityInSequence::Internal)
                        .collect::<Vec<_>>();
                    if row_delta + 1 == self.block_size as i64 {
                        // Process the query on the query row.
                        identities.push(IdentityInSequence::OuterQuery);
                    }
                    identities.into_iter().map(move |identity| SequenceStep {
                        row_delta,
                        identity,
                    })
                })
                .collect()
        })
    }

    /// Records the steps that made progress for this query pattern.
    /// Only the first reported sequence per pattern is kept.
    pub fn report_processing_sequence(
        &mut self,
        left: &[Result<AffineExpression, EvalError>],
        sequence: Vec<SequenceStep>,
    ) {
        self.cache.entry(left.into()).or_insert(sequence);
    }
}

View File

@@ -1,11 +1,14 @@
use std::collections::BTreeMap;
use std::collections::HashSet;
use super::block_machine::BlockMachine;
use super::double_sorted_witness_machine::DoubleSortedWitnesses;
use super::fixed_lookup_machine::FixedLookup;
use super::sorted_witness_machine::SortedWitnesses;
use super::FixedData;
use super::Machine;
use crate::analyzer::{Expression, Identity, SelectedExpressions};
use crate::witness_generator::bit_constraints::BitConstraint;
use crate::witness_generator::WitnessColumn;
/// Finds machines in the witness columns and identities
@@ -13,8 +16,9 @@ use crate::witness_generator::WitnessColumn;
/// that are not "internal" to the machines.
pub fn split_out_machines<'a>(
fixed: &'a FixedData<'a>,
identities: &'a [Identity],
identities: Vec<&'a Identity>,
witness_cols: &'a [WitnessColumn],
global_bit_constraints: &BTreeMap<&'a str, BitConstraint>,
) -> (FixedLookup, Vec<Box<dyn Machine>>, Vec<&'a Identity>) {
let fixed_lookup = FixedLookup::try_new(fixed, &[], &Default::default()).unwrap();
@@ -22,8 +26,8 @@ pub fn split_out_machines<'a>(
let all_witnesses = witness_cols.iter().map(|c| c.name).collect::<HashSet<_>>();
let mut remaining_witnesses = all_witnesses.clone();
let mut base_identities = identities.iter().collect::<Vec<_>>();
for id in identities {
let mut base_identities = identities.clone();
for id in &identities {
// Extract all witness columns in the RHS of the lookup.
let lookup_witnesses = &refs_in_selected_expressions(&id.right) & (&remaining_witnesses);
if lookup_witnesses.is_empty() {
@@ -33,12 +37,12 @@ pub fn split_out_machines<'a>(
// Recursively extend the set to all witnesses connected through identities that preserve
// a fixed row relation.
let machine_witnesses =
all_row_connected_witnesses(lookup_witnesses, &remaining_witnesses, identities);
all_row_connected_witnesses(lookup_witnesses, &remaining_witnesses, &identities);
// Split identities into those that only concern the machine
// witnesses and those that concern any other witness.
let (machine_identities, remaining_identities): (Vec<_>, _) =
base_identities.iter().partition(|i| {
base_identities.iter().cloned().partition(|i| {
// The identity has at least one machine witness, but
// all referenced witnesses are machine witnesses.
let all_refs = &refs_in_identity(i) & (&all_witnesses);
@@ -47,6 +51,17 @@ pub fn split_out_machines<'a>(
base_identities = remaining_identities;
remaining_witnesses = &remaining_witnesses - &machine_witnesses;
let connecting_identities = base_identities
.iter()
.cloned()
.filter(|i| {
refs_in_identity(i)
.intersection(&machine_witnesses)
.next()
.is_some()
})
.collect::<Vec<_>>();
if fixed.verbose {
println!(
"Extracted a machine with the following witnesses and identities:\n{}\n{}",
@@ -77,6 +92,17 @@ pub fn split_out_machines<'a>(
println!("Detected machine: memory");
}
machines.push(machine);
} else if let Some(machine) = BlockMachine::try_new(
fixed,
&connecting_identities,
&machine_identities,
&machine_witnesses,
global_bit_constraints,
) {
if fixed.verbose {
println!("Detected machine: block");
}
machines.push(machine);
} else {
println!(
"Could not find a matching machine to handle a query to the following witness set:\n{}",
@@ -100,7 +126,7 @@ pub fn split_out_machines<'a>(
fn all_row_connected_witnesses<'a>(
mut witnesses: HashSet<&'a str>,
all_witnesses: &HashSet<&'a str>,
identities: &'a [Identity],
identities: &'a [&'a Identity],
) -> HashSet<&'a str> {
loop {
let count = witnesses.len();

View File

@@ -8,6 +8,7 @@ pub use self::fixed_lookup_machine::FixedLookup;
use super::EvalResult;
use super::{affine_expression::AffineExpression, eval_error::EvalError, FixedData};
mod block_machine;
mod double_sorted_witness_machine;
mod fixed_lookup_machine;
pub mod machine_extractor;

View File

@@ -1,4 +1,5 @@
use std::collections::HashMap;
use std::fmt::Display;
use crate::analyzer::{Analyzed, Expression, FunctionValueDefinition};
use crate::number::{AbstractNumberType, DegreeType};
@@ -46,13 +47,14 @@ pub fn generate<'a>(
witness_cols.iter().map(|w| (w.name, w.id)).collect(),
verbose,
);
let (global_bit_constraints, identities) =
bit_constraints::determine_global_constraints(&fixed, analyzed.identities.iter().collect());
let (mut fixed_lookup, machines, identities) = machines::machine_extractor::split_out_machines(
&fixed,
&analyzed.identities,
identities,
&witness_cols,
&global_bit_constraints,
);
let (global_bit_constraints, identities) =
bit_constraints::determine_global_constraints(&fixed, identities);
let mut generator = generator::Generator::new(
&fixed,
&mut fixed_lookup,
@@ -93,6 +95,15 @@ pub enum Constraint {
BitConstraint(BitConstraint),
}
impl Display for Constraint {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Constraint::Assignment(a) => write!(f, " = {a}"),
Constraint::BitConstraint(bc) => write!(f, ":& {bc}"),
}
}
}
/// Data that is fixed for witness generation.
pub struct FixedData<'a> {
degree: DegreeType,

View File

@@ -70,3 +70,8 @@ fn test_witness_lookup() {
fn test_pair_lookup() {
verify_pil("pair_lookup.pil", None);
}
#[test]
fn test_block_lookup_or() {
    // End-to-end test for the block machine: byte-wise OR of 32-bit numbers
    // via a lookup into the `Or` namespace of block_lookup_or.pil.
    verify_pil("block_lookup_or.pil", None);
}

View File

@@ -0,0 +1,47 @@
constant %N = 65536;
// ORs two 32-bit numbers, byte-by-byte.
namespace Or(%N);
// Returns 1 if X is nonzero, 0 otherwise.
macro is_nonzero(X) {
match X {
0 => 0,
_ => 1,
}
};
// Returns 1 if X is zero, 0 otherwise.
macro is_zero(X) { 1 - is_nonzero(X) };
// 1 on the last row of each 4-row block (i % 4 == 3), 0 otherwise.
col fixed RESET(i) { is_zero((i % 4) - 3) };
// Shift factor placing each byte at its position in the 32-bit word.
col fixed FACTOR(i) { 1 << (((i + 1) % 4) * 8) };
// Lookup table enumerating all byte pairs and their bitwise OR.
col fixed P_A(i) { i % 256 };
col fixed P_B(i) { (i >> 8) % 256 };
col fixed P_C(i) { (P_A(i) | P_B(i)) & 0xff };
// ROW RESET FACTOR
// 0 0 1 << 8
// 1 0 1 << 16
// 2 0 1 << 24
// 3 1 1 << 0
col witness A_byte;
col witness B_byte;
col witness C_byte;
col witness A;
col witness B;
col witness C;
// Accumulate the bytes into the full words; RESET restarts the accumulation.
A' = A * (1 - RESET) + A_byte * FACTOR;
B' = B * (1 - RESET) + B_byte * FACTOR;
C' = C * (1 - RESET) + C_byte * FACTOR;
// Each byte triple must appear in the OR lookup table.
{A_byte, B_byte, C_byte} in {P_A, P_B, P_C};
namespace Main(%N);
col fixed a(i) { (i + 13) & 0xffffffff };
col fixed b(i) { ((i + 19) * 17) & 0xffffffff };
col witness c;
// Query the Or machine on every 32nd row.
col fixed NTH(i) { is_zero(i % 32) };
NTH {a, b, c} in Or.RESET {Or.A, Or.B, Or.C};