Commit faa7ad6

Merge branch 'dev-fri-verifier2' of https://github.com/zksecurity/pumice into dev-fri-verifier3

jaehunkim committed Oct 7, 2024
2 parents f6fd49e + 1d89996
Showing 21 changed files with 1,297 additions and 274 deletions.
7 changes: 7 additions & 0 deletions .github/workflows/develop.yml
@@ -1,10 +1,12 @@
on:
  push:
    branches:
      - '**'
      - "!master"

  pull_request:
    branches:
      - '**'
      - "!master"

jobs:
@@ -19,6 +21,11 @@ jobs:
          targets: wasm32-unknown-unknown
          components: rustfmt, clippy

      - name: Run the license check
        run: |
          cargo install --locked cargo-about
          cargo about generate --format json --fail --all-features > /dev/null
      - name: Run cargo test
        run: cargo test --all-features

5 changes: 5 additions & 0 deletions .github/workflows/master.yml
@@ -19,6 +19,11 @@ jobs:
          targets: wasm32-unknown-unknown
          components: rustfmt, clippy

      - name: Run the license check
        run: |
          cargo install --locked cargo-about
          cargo about generate --format json --fail --all-features > /dev/null
      - name: Run cargo test
        run: cargo test --all-features

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -4,8 +4,8 @@ resolver = "2"

[workspace.dependencies]
anyhow = "1.0.86"
ark-ff = "0.4.2"
ark-poly = "0.4.2"
ark-ff = { git = "https://github.com/jaehunkim/algebra", branch = "v0.4.2-fix" }
ark-poly = { git = "https://github.com/jaehunkim/algebra", branch = "v0.4.2-fix" }
sha3 = "0.10.8"
blake2 = "0.10.6"
hex-literal = "0.4.1"
10 changes: 10 additions & 0 deletions about.toml
@@ -0,0 +1,10 @@
accepted = [
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"Unicode-DFS-2016"
]
ignore-transitive-dependencies = false
ignore-build-dependencies = false
ignore-dev-dependencies = false
no-clearly-defined = true
1 change: 1 addition & 0 deletions channel/Cargo.toml
@@ -2,6 +2,7 @@
name = "channel"
version = "0.1.0"
edition = "2021"
license = "MIT"

[dependencies]
ark-ff.workspace = true
1 change: 1 addition & 0 deletions channel/src/fs_prover_channel.rs
@@ -133,6 +133,7 @@ impl<F: PrimeField, P: Prng, W: Digest> ProverChannel for FSProverChannel<F, P,
            // let big_int = felem.div(self.mont_r_inv).into_bigint();
            let big_int = felem.into_bigint();
            let bytes = big_int.to_bytes_be();
            // println!("raw_bytes: {:?}", bytes);
            raw_bytes.extend_from_slice(&bytes);
        }
        self.send_bytes(&raw_bytes)?;
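For reference, the conversion done per element in send_felts above can be reduced to a minimal, self-contained sketch. The helper name felt_to_be_bytes is illustrative and not part of the repository; the only assumption is a type implementing ark-ff's PrimeField.

use ark_ff::{BigInteger, PrimeField};

// Illustrative sketch, not repository code: serialize one field element to
// canonical big-endian bytes, as send_felts does for each element it sends.
fn felt_to_be_bytes<F: PrimeField>(felem: F) -> Vec<u8> {
    // into_bigint() returns the canonical integer value (ark-ff stores elements
    // in Montgomery form internally); to_bytes_be() renders it big-endian.
    felem.into_bigint().to_bytes_be()
}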
7 changes: 4 additions & 3 deletions commitment_scheme/Cargo.toml
@@ -2,6 +2,7 @@
name = "commitment_scheme"
version = "0.1.0"
edition = "2021"
license = "MIT"

[dependencies]
ark-ff.workspace = true
@@ -10,10 +11,10 @@ sha3.workspace = true
poseidon = { path = "../poseidon" }
felt = { path = "../felt" }
channel = { path = "../channel" }
randomness ={ path = "../randomness" }
randomness = { path = "../randomness" }
anyhow.workspace = true
num-bigint.workspace = true

[dev-dependencies]
hex.workspace = true
rand.workspace = true
hex.workspace = true
rand.workspace = true
43 changes: 34 additions & 9 deletions commitment_scheme/src/table_prover.rs
@@ -1,5 +1,6 @@
use crate::make_commitment_scheme_prover;
use crate::table_utils::{all_query_rows, elements_to_be_transmitted, RowCol};
use crate::CommitmentSchemeProver;
use crate::{CommitmentHashes, CommitmentSchemeProver};
use ark_ff::{BigInteger, PrimeField};
use channel::fs_prover_channel::FSProverChannel;
use channel::ProverChannel;
@@ -13,19 +14,38 @@ pub struct TableProver<F: PrimeField, P: Prng, W: Digest> {
    data_queries: BTreeSet<RowCol>,
    integrity_queries: BTreeSet<RowCol>,
    all_query_rows: BTreeSet<usize>,
    mont_r: F,
}

impl<F: PrimeField, P: Prng, W: Digest> TableProver<F, P, W> {
impl<F: PrimeField, P: Prng + Clone + 'static, W: Digest + Clone + 'static> TableProver<F, P, W> {
    pub fn new(
        n_segments: usize,
        n_rows_per_segment: usize,
        n_columns: usize,
        commitment_scheme: Box<dyn CommitmentSchemeProver<F, P, W>>,
        field_element_size_in_bytes: usize,
        n_verifier_friendly_commitment_layers: usize,
        commitment_hashes: CommitmentHashes,
        mont_r: F,
    ) -> Self {
        let size_of_row = field_element_size_in_bytes * n_columns;

        let commitment_scheme: Box<dyn CommitmentSchemeProver<F, P, W>> =
            make_commitment_scheme_prover(
                size_of_row,
                n_rows_per_segment,
                n_segments,
                n_verifier_friendly_commitment_layers,
                commitment_hashes,
                n_columns,
            );

        Self {
            n_columns,
            commitment_scheme,
            data_queries: BTreeSet::new(),
            integrity_queries: BTreeSet::new(),
            all_query_rows: BTreeSet::new(),
            mont_r,
        }
    }

@@ -41,9 +61,10 @@ impl<F: PrimeField, P: Prng, W: Digest> TableProver<F, P, W> {
"segment length is expected to be equal to the number of columns"
);

let serialised_segment = serialize_field_columns(segment, self.mont_r);
let _ = &self
.commitment_scheme
.add_segment_for_commitment(&serialize_field_columns(segment), segment_idx);
.add_segment_for_commitment(&serialised_segment, segment_idx);
}

pub fn commit(&mut self, channel: &mut FSProverChannel<F, P, W>) -> Result<(), anyhow::Error> {
@@ -120,14 +141,17 @@ impl<F: PrimeField, P: Prng, W: Digest> TableProver<F, P, W> {

                    if let Some(&to_transmit_loc) = to_transmit_it.next() {
                        assert!(to_transmit_loc == query_loc);
                        channel.send_felts(&[data[i]])?;
                        let data_mont = data[i] * self.mont_r;
                        channel.send_felts(&[data_mont])?;
                    }
                }
            }
        }

        self.commitment_scheme
            .decommit(&serialize_field_columns(&elements_data_last_rows), channel)?;
        self.commitment_scheme.decommit(
            &serialize_field_columns(&elements_data_last_rows, self.mont_r),
            channel,
        )?;

        Ok(())
    }
@@ -146,7 +170,7 @@ fn verify_all_columns_same_length<FieldElementT>(columns: &[Vec<FieldElementT>])
    columns.iter().all(|column| column.len() == n_rows)
}

fn serialize_field_columns<F: PrimeField>(segment: &[Vec<F>]) -> Vec<u8> {
pub fn serialize_field_columns<F: PrimeField>(segment: &[Vec<F>], mont_r: F) -> Vec<u8> {
    let columns = segment;

    assert!(
@@ -163,7 +187,8 @@ fn serialize_field_columns<F: PrimeField>(segment: &[Vec<F>]) -> Vec<u8> {

    for row in 0..n_rows {
        for col_data in columns.iter().take(n_columns) {
            serialization.extend_from_slice(&col_data[row].into_bigint().to_bytes_be());
            let data_mont = col_data[row] * mont_r;
            serialization.extend_from_slice(&data_mont.into_bigint().to_bytes_be());
        }
    }

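The recurring multiplication by mont_r in this file is the substance of the change: assuming mont_r holds R mod p as a field element, multiplying before into_bigint() makes the serialized bytes carry the Montgomery representation x*R mod p rather than the canonical value. A minimal sketch of the per-element step follows; the helper name to_montgomery_bytes is illustrative, not repository code.

use ark_ff::{BigInteger, PrimeField};

// Illustrative sketch, not repository code: the per-element transformation
// applied in serialize_field_columns, with mont_r assumed to be R mod p.
fn to_montgomery_bytes<F: PrimeField>(x: F, mont_r: F) -> Vec<u8> {
    // into_bigint() yields the canonical integer of its argument, so converting
    // x * mont_r produces the big-endian bytes of x*R mod p.
    (x * mont_r).into_bigint().to_bytes_be()
}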