chore: refactor create proof method (#77)
* draft alt design for create-proof

* fix clippy

Signed-off-by: Kevaundray Wedderburn <[email protected]>

---------

Signed-off-by: Kevaundray Wedderburn <[email protected]>
kevaundray authored Jan 31, 2024
1 parent 52f6b9a commit 47e422a
Showing 1 changed file with 94 additions and 0 deletions.
94 changes: 94 additions & 0 deletions ffi_interface/src/lib.rs
@@ -315,6 +315,100 @@ pub fn create_proof(input: Vec<u8>) -> Vec<u8> {
    proof.to_bytes().unwrap()
}

// This is an alternative implementation of `create_proof`.
pub fn create_proof_alt(input: Vec<u8>) -> Vec<u8> {
    // - Check the serialized proof queries
    //
    // Each query is one chunk of `CHUNK_SIZE` bytes, laid out as:
    //   C_i, f_i(X), z_i, y_i
    //   32,  8192,   1,   32
    //   = 8257
    // (see the caller-side sketch after this function)
    const CHUNK_SIZE: usize = 8257; // TODO: get this from ipa-multipoint

    if input.len() % CHUNK_SIZE != 0 {
        // TODO: change this to an error
        panic!("Input length must be a multiple of {}", CHUNK_SIZE);
    }
    let num_proofs = input.len() / CHUNK_SIZE;

    let proofs_bytes = input.chunks_exact(CHUNK_SIZE);
    assert!(
        proofs_bytes.remainder().is_empty(),
        "There should be no left over bytes when chunking the proof"
    );

    // - Deserialize proof queries
    //
    let mut prover_queries: Vec<ProverQuery> = Vec::with_capacity(num_proofs);

    for proof_bytes in proofs_bytes {
        let prover_query = deserialize_proof_query(proof_bytes);
        prover_queries.push(prover_query);
    }

    // - Create proofs
    //
    // TODO: This should be passed in as a pointer
    let precomp = PrecomputedWeights::new(256);

    let crs = CRS::default();
    let mut transcript = Transcript::new(b"verkle");

    // TODO: This should not need to clone the CRS, but instead take a reference
    let proof = MultiPoint::open(crs.clone(), &precomp, &mut transcript, prover_queries);
    proof.to_bytes().expect("cannot serialize proof")
}
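
// Illustrative sketch: a hypothetical caller-side helper (not part of this
// crate's API) showing how a single 8257-byte chunk for `create_proof_alt`
// might be assembled, assuming the commitment, the 256 Lagrange-basis
// evaluations, and y_i have already been serialized to 32-byte arrays.
fn serialize_proof_query_sketch(
    commitment: [u8; 32],
    evaluations: &[[u8; 32]; 256],
    z_i: u8,
    y_i: [u8; 32],
) -> Vec<u8> {
    // 32 + 256 * 32 + 1 + 32 = 8257, matching CHUNK_SIZE above
    let mut out = Vec::with_capacity(8257);
    out.extend_from_slice(&commitment);
    for evaluation in evaluations.iter() {
        out.extend_from_slice(evaluation);
    }
    out.push(z_i);
    out.extend_from_slice(&y_i);
    debug_assert_eq!(out.len(), 8257);
    out
}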

#[must_use]
fn deserialize_proof_query(bytes: &[u8]) -> ProverQuery {
    // Commitment
    let (commitment, mut bytes) = take_group_element(bytes);

    // f_x is a polynomial of degree 255, so we have 256 Fr elements
    const NUMBER_OF_EVALUATIONS: usize = 256;
    let mut collect_lagrange_basis: Vec<Fr> = Vec::with_capacity(NUMBER_OF_EVALUATIONS);
    for _ in 0..NUMBER_OF_EVALUATIONS {
        let (scalar, offsetted_bytes) = take_scalar(bytes);
        collect_lagrange_basis.push(scalar);
        bytes = offsetted_bytes;
    }

    // The input point is a single byte
    let (z_i, bytes) = take_byte(bytes);

    // The evaluation is a single scalar
    let (y_i, bytes) = take_scalar(bytes);

    assert!(bytes.is_empty(), "we should have consumed all the bytes");

    ProverQuery {
        commitment,
        poly: LagrangeBasis::new(collect_lagrange_basis),
        point: z_i,
        result: y_i,
    }
}

#[must_use]
fn take_group_element(bytes: &[u8]) -> (Element, &[u8]) {
    let element = Element::from_bytes(&bytes[0..32]).expect("could not deserialize element");
    // Increment the slice by 32 bytes
    (element, &bytes[32..])
}

#[must_use]
fn take_byte(bytes: &[u8]) -> (usize, &[u8]) {
    let z_i = bytes[0] as usize;
    // Increment the slice by 1 byte
    (z_i, &bytes[1..])
}

#[must_use]
fn take_scalar(bytes: &[u8]) -> (Fr, &[u8]) {
    let y_i = fr_from_le_bytes(&bytes[0..32]).expect("could not deserialize y_i");
    // Increment the slice by 32 bytes
    (y_i, &bytes[32..])
}
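
// Illustrative sketch: the `take_*` helpers above share one pattern, namely
// read a fixed-size prefix and return the remaining slice so the next call
// continues where the previous one stopped. A hypothetical generic form of
// that pattern (not part of this crate's API):
fn take_fixed<const N: usize>(bytes: &[u8]) -> ([u8; N], &[u8]) {
    let (head, rest) = bytes.split_at(N);
    let mut out = [0u8; N];
    out.copy_from_slice(head);
    (out, rest)
}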

#[cfg(test)]
mod tests {
    use banderwagon::Fr;
