Skip to content

Commit

Permalink
Merge pull request #2 from jjeangal/bindings
Browse files Browse the repository at this point in the history
Bindings update with the latest Rust changes
  • Loading branch information
tanishqjasoria authored Aug 13, 2024
2 parents 0c60651 + 8da987e commit 4960905
Show file tree
Hide file tree
Showing 5 changed files with 264 additions and 6 deletions.
122 changes: 117 additions & 5 deletions bindings/c/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
use ffi_interface::{
deserialize_proof_query, deserialize_verifier_query, fr_from_le_bytes, get_tree_key_hash,
Context,
deserialize_proof_query, deserialize_proof_query_uncompressed, deserialize_verifier_query,
deserialize_verifier_query_uncompressed, fr_from_le_bytes, get_tree_key_hash, Context,

Check failure on line 3 in bindings/c/src/lib.rs

View workflow job for this annotation

GitHub Actions / Lints

use of deprecated function `ffi_interface::get_tree_key_hash`: moving forward one should implement this method on the caller side

Check warning on line 3 in bindings/c/src/lib.rs

View workflow job for this annotation

GitHub Actions / Check

use of deprecated function `ffi_interface::get_tree_key_hash`: moving forward one should implement this method on the caller side

Check warning on line 3 in bindings/c/src/lib.rs

View workflow job for this annotation

GitHub Actions / Test Suite

use of deprecated function `ffi_interface::get_tree_key_hash`: moving forward one should implement this method on the caller side
};
use ipa_multipoint::committer::{Committer, DefaultCommitter};
use ipa_multipoint::crs::CRS;
use ipa_multipoint::lagrange_basis::PrecomputedWeights;
use ipa_multipoint::committer::Committer;
use ipa_multipoint::multiproof::{MultiPoint, MultiPointProof, ProverQuery, VerifierQuery};
use ipa_multipoint::transcript::Transcript;

Expand Down Expand Up @@ -134,6 +132,63 @@ pub extern "C" fn create_proof(ctx: *mut Context, input: *const u8, len: usize,
}
}

#[no_mangle]
pub extern "C" fn create_proof_uncompressed(
    ctx: *mut Context,
    input: *const u8,
    len: usize,
    out: *mut u8,
) {
    // Each opening is 8257 + 32 = 8289 bytes: the leading commitment is
    // uncompressed (64 bytes) instead of compressed (32 bytes).
    const CHUNK_SIZE: usize = 8289; // TODO: get this from ipa-multipoint
    const PROOF_SIZE: usize = 1120; // TODO: get this from ipa-multipoint

    // SAFETY: the caller must supply a valid `ctx` pointer and an `input`
    // buffer with at least `len` readable bytes.
    let (input_bytes, context) = unsafe { (std::slice::from_raw_parts(input, len), &*ctx) };

    let opening_count = len / CHUNK_SIZE;

    let chunks = input_bytes.chunks_exact(CHUNK_SIZE);
    assert!(
        chunks.remainder().is_empty(),
        "There should be no left over bytes when chunking the proof"
    );

    // - Deserialize proof queries
    let mut queries: Vec<ProverQuery> = Vec::with_capacity(opening_count);
    for chunk in chunks {
        queries.push(deserialize_proof_query_uncompressed(chunk));
    }

    // - Create proofs
    let mut transcript = Transcript::new(b"verkle");

    let proof = MultiPoint::open(
        // TODO: This should not need to clone the CRS, but instead take a reference
        context.crs.clone(),
        &context.precomputed_weights,
        &mut transcript,
        queries,
    );

    let serialized = proof
        .to_bytes_uncompressed()
        .expect("cannot serialize proof");

    // SAFETY: the caller must supply an `out` buffer with at least
    // PROOF_SIZE writable bytes.
    unsafe {
        std::slice::from_raw_parts_mut(out, PROOF_SIZE).copy_from_slice(&serialized);
    }
}

#[no_mangle]
pub extern "C" fn verify_proof(ctx: *mut Context, input: *const u8, len: usize) -> bool {
const CHUNK_SIZE: usize = 65; // TODO: get this from ipa-multipoint
Expand Down Expand Up @@ -185,3 +240,60 @@ pub extern "C" fn verify_proof(ctx: *mut Context, input: *const u8, len: usize)

return is_valid;
}

#[no_mangle]
pub extern "C" fn verify_proof_uncompressed(
    ctx: *mut Context,
    input: *const u8,
    len: usize,
) -> bool {
    // Each verifier query is 65 + 32 = 97 bytes: the leading commitment is
    // uncompressed (64 bytes) instead of compressed (32 bytes).
    const CHUNK_SIZE: usize = 97; // TODO: get this from ipa-multipoint
    const PROOF_SIZE: usize = 1120; // TODO: get this from ipa-multipoint

    // SAFETY: the caller must supply a valid `ctx` pointer and an `input`
    // buffer with at least `len` readable bytes (and `len >= PROOF_SIZE`).
    let (proof_slice, verifier_queries_slices, context) = unsafe {
        let input_slice = std::slice::from_raw_parts(input, len);

        let (proof_slice, verifier_queries_slices) = input_slice.split_at(PROOF_SIZE);

        let ctx_ref = &*ctx;

        (proof_slice, verifier_queries_slices, ctx_ref)
    };

    let verifier_queries_bytes = verifier_queries_slices.chunks_exact(CHUNK_SIZE);
    assert!(
        verifier_queries_bytes.remainder().is_empty(),
        "There should be no left over bytes when chunking the verifier queries"
    );

    // `ChunksExact::len` already returns the number of chunks (openings);
    // dividing by CHUNK_SIZE again would under-reserve the Vec below.
    let num_openings = verifier_queries_bytes.len();

    // - Deserialize verifier queries
    //
    let mut verifier_queries: Vec<VerifierQuery> = Vec::with_capacity(num_openings);

    for verifier_query_bytes in verifier_queries_bytes {
        let verifier_query = deserialize_verifier_query_uncompressed(verifier_query_bytes);
        verifier_queries.push(verifier_query);
    }

    // - Check proof
    //
    let proof = MultiPointProof::from_bytes_unchecked_uncompressed(proof_slice, 256).unwrap();

    let mut transcript = Transcript::new(b"verkle");

    // Borrow the CRS directly; cloning it just to borrow the temporary
    // (`&context.crs.clone()`) was a needless allocation.
    let is_valid = MultiPointProof::check(
        &proof,
        &context.crs,
        &context.precomputed_weights,
        &verifier_queries,
        &mut transcript,
    );

    is_valid
}
5 changes: 4 additions & 1 deletion ffi_interface/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,10 @@ use ipa_multipoint::transcript::Transcript;
pub use serialization::{fr_from_le_bytes, fr_to_le_bytes};
use verkle_trie::proof::golang_proof_format::{bytes32_to_element, hex_to_bytes32, VerkleProofGo};

pub use crate::serialization::{deserialize_proof_query, deserialize_verifier_query};
pub use crate::serialization::{
deserialize_proof_query, deserialize_proof_query_uncompressed, deserialize_verifier_query,
deserialize_verifier_query_uncompressed,
};

/// Context holds all of the necessary components needed for cryptographic operations
/// in the Verkle Trie. This includes:
Expand Down
61 changes: 61 additions & 0 deletions ffi_interface/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,36 @@ pub fn deserialize_proof_query(bytes: &[u8]) -> ProverQuery {
}
}

#[must_use]
pub fn deserialize_proof_query_uncompressed(bytes: &[u8]) -> ProverQuery {
    // The query starts with an uncompressed (64-byte) commitment.
    let (commitment, mut rest) = take_uncompressed_group_element(bytes);

    // f_x is a polynomial of degree 255, so 256 Fr evaluations follow.
    const NUMBER_OF_EVALUATIONS: usize = 256;
    let mut evaluations: Vec<Fr> = Vec::with_capacity(NUMBER_OF_EVALUATIONS);
    for _ in 0..NUMBER_OF_EVALUATIONS {
        let (scalar, remaining) = take_scalar(rest);
        evaluations.push(scalar);
        rest = remaining;
    }

    // A single byte encodes the evaluation point.
    let (point, rest) = take_byte(rest);

    // Followed by the claimed evaluation as a single scalar.
    let (result, rest) = take_scalar(rest);

    assert!(rest.is_empty(), "we should have consumed all the bytes");

    ProverQuery {
        commitment,
        poly: LagrangeBasis::new(evaluations),
        point,
        result,
    }
}

#[must_use]
pub fn deserialize_verifier_query(bytes: &[u8]) -> VerifierQuery {
// Commitment
Expand All @@ -131,6 +161,37 @@ pub fn deserialize_verifier_query(bytes: &[u8]) -> VerifierQuery {
}
}

#[must_use]
pub fn deserialize_verifier_query_uncompressed(bytes: &[u8]) -> VerifierQuery {
    // The query starts with an uncompressed (64-byte) commitment.
    let (commitment, rest) = take_uncompressed_group_element(bytes);

    // A single byte encodes the evaluation point.
    let (point_byte, rest) = take_byte(rest);

    // Followed by the claimed evaluation as a single scalar.
    let (result, rest) = take_scalar(rest);

    assert!(rest.is_empty(), "we should have consumed all the bytes");

    VerifierQuery {
        commitment,
        // Widen the evaluation-point byte into a field element.
        point: Fr::from(point_byte as u128),
        result,
    }
}

#[must_use]
pub(crate) fn take_uncompressed_group_element(bytes: &[u8]) -> (Element, &[u8]) {
    // An uncompressed group element occupies the first 64 bytes.
    let raw: CommitmentBytes = bytes[..64]
        .try_into()
        .expect("Expected a slice of exactly 64 bytes");

    // Hand back the element together with the unconsumed remainder.
    (Element::from_bytes_unchecked_uncompressed(raw), &bytes[64..])
}

#[must_use]
pub(crate) fn take_group_element(bytes: &[u8]) -> (Element, &[u8]) {
let element = Element::from_bytes(&bytes[0..32]).expect("could not deserialize element");
Expand Down
57 changes: 57 additions & 0 deletions ipa-multipoint/src/ipa.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ impl IPAProof {
// Byte length of the compressed encoding: the L and R vectors (equal
// length) at 32 bytes per compressed point, plus 32 bytes for the
// scalar `a`.
pub(crate) fn serialized_size(&self) -> usize {
    (self.L_vec.len() * 2 + 1) * 32
}
// Byte length of the uncompressed encoding: 64 bytes per uncompressed
// point across both the L and R vectors (equal length), plus 32 bytes
// for the scalar `a` (serialized the same in 32 bytes either way).
pub(crate) fn uncompressed_size(&self) -> usize {
    (self.L_vec.len() * 2 * 64) + 32
}
pub fn from_bytes(bytes: &[u8], poly_degree: usize) -> IOResult<IPAProof> {
// Given the polynomial degree, we will have log2 * 2 points
let num_points = log2(poly_degree);
Expand Down Expand Up @@ -58,6 +62,43 @@ impl IPAProof {

Ok(IPAProof { L_vec, R_vec, a })
}
/// Deserializes an IPA proof from its uncompressed encoding without
/// validating the group elements.
pub fn from_bytes_unchecked_uncompressed(
    bytes: &[u8],
    poly_degree: usize,
) -> IOResult<IPAProof> {
    // An opening over `poly_degree` evaluations has log2(poly_degree)
    // rounds, each contributing one L point and one R point.
    let num_points = log2(poly_degree);

    // 64 bytes per uncompressed point plus the trailing 32-byte scalar.
    assert_eq!(((num_points * 2) * 64) + 32, bytes.len() as u32);

    // The scalar `a` lives in the final 32 bytes.
    let (points_bytes, a_bytes) = bytes.split_at(bytes.len() - 32);
    assert!(a_bytes.len() == 32);

    let mut chunks = points_bytes.chunks_exact(64);

    // Pull the next 64-byte chunk off the iterator as an element; the
    // length assertion above guarantees enough chunks exist.
    let mut next_point = || -> Element {
        let raw: [u8; 64] = chunks.next().unwrap().try_into().unwrap();
        Element::from_bytes_unchecked_uncompressed(raw)
    };

    // All L points are serialized first, then all R points.
    let L_vec: Vec<Element> = (0..num_points).map(|_| next_point()).collect();
    let R_vec: Vec<Element> = (0..num_points).map(|_| next_point()).collect();

    let a: Fr = CanonicalDeserialize::deserialize_compressed(a_bytes)
        .map_err(|_| IOError::from(IOErrorKind::InvalidData))?;

    Ok(IPAProof { L_vec, R_vec, a })
}
pub fn to_bytes(&self) -> IOResult<Vec<u8>> {
// We do not serialize the length. We assume that the deserializer knows this.
let mut bytes = Vec::with_capacity(self.serialized_size());
Expand All @@ -75,6 +116,22 @@ impl IPAProof {
.map_err(|_| IOError::from(IOErrorKind::InvalidData))?;
Ok(bytes)
}
/// Serializes the proof with uncompressed (64-byte) group elements.
pub fn to_bytes_uncompressed(&self) -> IOResult<Vec<u8>> {
    // Reserve the exact uncompressed size up front.
    let mut serialized = Vec::with_capacity(self.uncompressed_size());

    // All L points first, then all R points, 64 bytes each.
    for point in self.L_vec.iter().chain(self.R_vec.iter()) {
        serialized.extend(point.to_bytes_uncompressed());
    }

    // The scalar `a` occupies the trailing 32 bytes.
    self.a
        .serialize_uncompressed(&mut serialized)
        .map_err(|_| IOError::from(IOErrorKind::InvalidData))?;
    Ok(serialized)
}
}

pub fn create(
Expand Down
25 changes: 25 additions & 0 deletions ipa-multipoint/src/multiproof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -186,13 +186,38 @@ impl MultiPointProof {
g_x_comm,
})
}

/// Deserializes a multipoint proof from its uncompressed encoding
/// without validating the group elements.
pub fn from_bytes_unchecked_uncompressed(
    bytes: &[u8],
    poly_degree: usize,
) -> crate::IOResult<MultiPointProof> {
    // The first 64 bytes are the uncompressed commitment to g(X).
    // TODO: we should return a Result here in case the user gives us bad bytes
    let commitment_bytes: [u8; 64] = bytes[..64]
        .try_into()
        .expect("Expected a slice of exactly 64 bytes");
    let g_x_comm = Element::from_bytes_unchecked_uncompressed(commitment_bytes);

    // The remainder holds the uncompressed IPA opening proof.
    let open_proof = IPAProof::from_bytes_unchecked_uncompressed(&bytes[64..], poly_degree)?;

    Ok(MultiPointProof {
        open_proof,
        g_x_comm,
    })
}
/// Serializes the proof with compressed group elements: the 32-byte
/// g(X) commitment followed by the compressed IPA proof.
pub fn to_bytes(&self) -> crate::IOResult<Vec<u8>> {
    let mut serialized = Vec::with_capacity(32 + self.open_proof.serialized_size());
    serialized.extend(self.g_x_comm.to_bytes());
    serialized.extend(self.open_proof.to_bytes()?);
    Ok(serialized)
}
/// Serializes the proof with uncompressed group elements: the 64-byte
/// g(X) commitment followed by the uncompressed IPA proof.
pub fn to_bytes_uncompressed(&self) -> crate::IOResult<Vec<u8>> {
    let mut serialized = Vec::with_capacity(64 + self.open_proof.uncompressed_size());
    serialized.extend(self.g_x_comm.to_bytes_uncompressed());
    serialized.extend(self.open_proof.to_bytes_uncompressed()?);
    Ok(serialized)
}
}

impl MultiPointProof {
Expand Down

0 comments on commit 4960905

Please sign in to comment.