Compare commits


6 Commits

Author  SHA1        Message                                      Date
dante   0ecee16120  Merge branch 'main' into ac/gen-rand-data    2025-01-09 00:13:50 +00:00
dante   8ba4a853ad  Update execute.rs                            2025-01-08 15:44:18 +00:00
dante   343cdf21fb  Update execute.rs                            2025-01-08 15:27:09 +00:00
dante   8d623e59ef  fix: wasm and ios compiles                   2025-01-08 15:26:58 +00:00
dante   6096df7853  Update linear_regression.ipynb               2025-01-08 15:22:22 +00:00
dante   adb9bf49f4  feat: add gen-random-data helpers func       2025-01-08 15:19:59 +00:00
7 changed files with 24 additions and 28 deletions

View File

@@ -6,6 +6,17 @@ on:
       description: "Test scenario tags"
 jobs:
+  bench_elgamal:
+    runs-on: self-hosted
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly-2023-06-27
+          override: true
+          components: rustfmt, clippy
+      - name: Bench elgamal
+        run: cargo bench --verbose --bench elgamal
   bench_poseidon:
     runs-on: self-hosted

View File

@@ -75,16 +75,6 @@ impl FromStr for CheckMode {
     }
 }
-impl CheckMode {
-    /// Returns the value of the check mode
-    pub fn is_safe(&self) -> bool {
-        match self {
-            CheckMode::SAFE => true,
-            CheckMode::UNSAFE => false,
-        }
-    }
-}
 #[allow(missing_docs)]
 /// An enum representing the tolerance we can accept for the accumulated arguments, either absolute or percentage
 #[derive(Clone, Default, Debug, PartialEq, PartialOrd, Serialize, Deserialize, Copy)]
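With the is_safe() helper removed here (its call site in range_check is dropped in a later hunk), code that still needs a boolean can match on the enum directly. A minimal sketch, not part of the diff; the function name is hypothetical:

    // Hypothetical helper: branch on CheckMode without the removed is_safe() method.
    fn wants_range_checks(mode: &CheckMode) -> bool {
        matches!(mode, CheckMode::SAFE)
    }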

View File

@@ -4038,7 +4038,7 @@ pub(crate) fn range_check<F: PrimeField + TensorType + PartialOrd + std::hash::H
     }
     let is_assigned = !w.any_unknowns()?;
-    if is_assigned && region.check_range() && config.check_mode.is_safe() {
+    if is_assigned && region.check_range() {
         // assert is within range
         let int_values = w.int_evals()?;
         for v in int_values.iter() {

View File

@@ -281,7 +281,7 @@ impl GraphWitness {
         let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, file);
         let witness: GraphWitness =
-            serde_json::from_reader(reader).map_err(Into::<GraphError>::into)?;
+            serde_json::from_reader(reader).map_err(|e| Into::<GraphError>::into(e))?;
         // check versions match
         crate::check_version_string_matches(witness.version.as_deref().unwrap_or(""));
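Both map_err spellings convert the serde_json error into a GraphError via Into; the closure form only spells out the argument, the behavior is identical. A minimal standalone sketch, assuming a GraphError type with a From<serde_json::Error> impl (these definitions are illustrative, not the crate's):

    use std::io::BufReader;

    // Illustrative stand-in for the crate's GraphError.
    #[derive(Debug)]
    struct GraphError(String);

    impl From<serde_json::Error> for GraphError {
        fn from(e: serde_json::Error) -> Self {
            GraphError(e.to_string())
        }
    }

    fn parse(bytes: &[u8]) -> Result<serde_json::Value, GraphError> {
        let reader = BufReader::new(bytes);
        // Function-path form (removed line):
        //   serde_json::from_reader(reader).map_err(Into::<GraphError>::into)
        // Closure form (added line): same conversion, argument made explicit.
        serde_json::from_reader(reader).map_err(|e| Into::<GraphError>::into(e))
    }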

View File

@@ -630,7 +630,9 @@ impl Model {
         let mut model = tract_onnx::onnx().model_for_read(reader)?;
         let variables: std::collections::HashMap<String, usize> =
-            std::collections::HashMap::from_iter(variables.iter().map(|(k, v)| (k.clone(), *v)));
+            std::collections::HashMap::from_iter(
+                variables.into_iter().map(|(k, v)| (k.clone(), *v)),
+            );
         for (i, id) in model.clone().inputs.iter().enumerate() {
             let input = model.node_mut(id.node);

View File

@@ -558,7 +558,7 @@ impl VarTensor {
         // duplicates every nth element to adjust for column overflow
         let v = v.duplicate_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
-        let mut res: ValTensor<F> =
+        let mut res: ValTensor<F> = {
             v.enum_map(|coord, k| {
                 let step = self.num_inner_cols();
@@ -579,18 +579,12 @@ impl VarTensor {
                     prev_cell = Some(cell.clone());
                 } else if coord > 0 && at_beginning_of_column {
                     if let Some(prev_cell) = prev_cell.as_ref() {
-                        let cell = if let Some(cell) = cell.cell() {
-                            cell
-                        } else {
-                            error!("Error getting cell: {:?}", (x,y));
-                            return Err(halo2_proofs::plonk::Error::Synthesis);
-                        };
-                        let prev_cell = if let Some(prev_cell) = prev_cell.cell() {
-                            prev_cell
-                        } else {
-                            error!("Error getting prev cell: {:?}", (x,y));
-                            return Err(halo2_proofs::plonk::Error::Synthesis);
-                        };
+                        let cell = cell.cell().ok_or({
+                            error!("Error getting cell: {:?}", (x,y));
+                            halo2_proofs::plonk::Error::Synthesis})?;
+                        let prev_cell = prev_cell.cell().ok_or({
+                            error!("Error getting cell: {:?}", (x,y));
+                            halo2_proofs::plonk::Error::Synthesis})?;
                         region.constrain_equal(prev_cell,cell)?;
                     } else {
                         error!("Previous cell was not set");
@@ -600,8 +594,7 @@ impl VarTensor {
                 Ok(cell)
-            })?.into();
+            })?.into()};
         let total_used_len = res.len();
         res.remove_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
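One detail of the ok_or({ ... }) pattern added above: the braced block is an ordinary argument expression, so it is evaluated (and its error! log emitted) even when the Option is Some; ok_or_else defers that work to the None case. A minimal sketch of the distinction, using println! and string errors in place of the crate's logging and error types:

    // Demonstrates eager vs. lazy error construction when converting Option to Result.
    fn demo(cell: Option<u32>) -> Result<u32, &'static str> {
        // Eager: the block runs (and prints) on every call, even for Some(_).
        let eager = cell.ok_or({
            println!("getting cell failed");
            "synthesis error"
        });

        // Lazy: the closure runs only when cell is None.
        let lazy = cell.ok_or_else(|| {
            println!("getting cell failed");
            "synthesis error"
        });

        eager.and(lazy)
    }

    fn main() {
        let _ = demo(Some(7)); // logs once, from the eager form only
        let _ = demo(None);    // logs twice
    }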

File diff suppressed because one or more lines are too long