diff --git a/.github/workflows/release-java-bindings.yml b/.github/workflows/release-java-bindings.yml
index 9bcafbb..7a65cb6 100644
--- a/.github/workflows/release-java-bindings.yml
+++ b/.github/workflows/release-java-bindings.yml
@@ -27,6 +27,22 @@ concurrency:
cancel-in-progress: true
jobs:
+ format:
+ name: Check Formatting
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ inputs.ref || github.ref }}
+ - name: Set up JDK
+ uses: actions/setup-java@v3
+ with:
+ distribution: 'temurin'
+ java-version: '11'
+ - name: Check formatting
+ run: ./gradlew spotlessCheck
+ working-directory: bindings/java/java_code
+
build:
name: Build - ${{ matrix.target }}
runs-on: ${{ matrix.os }}
diff --git a/bindings/java/java_code/build.gradle b/bindings/java/java_code/build.gradle
index fe4abe4..820673d 100644
--- a/bindings/java/java_code/build.gradle
+++ b/bindings/java/java_code/build.gradle
@@ -10,10 +10,35 @@ plugins {
group = 'io.github.crate-crypto'
version = '0.0.3'
+spotless {
+ java {
+ // Apply Google's Java formatting style
+ googleJavaFormat()
+
+    // Order imports and remove unused ones
+ importOrder()
+ removeUnusedImports()
+
+    // Ensure a final newline, 4-space indentation and no trailing whitespace
+ endWithNewline()
+ indentWithSpaces(4)
+ trimTrailingWhitespace()
+ }
+
+ // Use a simpler formatter for Gradle files
+ groovy {
+ target '*.gradle'
+ // Basic formatting rules instead of Eclipse formatter
+ // which seems to have issues on arm64-linux
+ indentWithSpaces(4)
+ trimTrailingWhitespace()
+ endWithNewline()
+ }
+}
java {
- withJavadocJar()
- withSourcesJar()
+ withJavadocJar()
+ withSourcesJar()
}
repositories {
@@ -30,7 +55,6 @@ if (hasProperty('buildScan')) {
dependencies {
testImplementation platform('org.junit:junit-bom:5.10.0')
testImplementation 'org.junit.jupiter:junit-jupiter'
-
implementation 'net.java.dev.jna:jna:5.12.1'
testImplementation 'io.tmio:tuweni-bytes:2.4.2'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.2'
@@ -38,9 +62,7 @@ dependencies {
testImplementation 'com.fasterxml.jackson.core:jackson-databind:2.12.5'
testImplementation 'org.assertj:assertj-core:3.22.0'
testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.2'
-
def jacksonVersion = "2.14.2"
-
testFixturesImplementation("org.apache.tuweni:tuweni-units:2.3.1")
testFixturesImplementation("com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}")
testFixturesImplementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${jacksonVersion}")
@@ -51,79 +73,75 @@ test {
dependsOn cleanTest
testLogging.showStandardStreams = true
}
-
-publishing {
- publications {
- maven(MavenPublication) {
- from components.java
- pom {
- name = 'Verkle Cryptography Library'
- description = 'A library for working with Cryptography used in verkle trie'
- url = 'https://github.com/crate-crypto/rust-verkle'
- licenses {
- license {
- name = 'The Apache License, Version 2.0'
- url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
- }
- }
- developers {
- developer {
- id = 'kevthedev'
- name = 'Kevaundray Wedderburn'
- email = 'kev@the.dev'
- }
- }
- scm {
- connection = 'scm:git:git://github.com/crate-crypto/rust-verkle.git'
- developerConnection = 'scm:git:ssh://github.com:crate-crypto/rust-verkle.git'
- url = 'https://github.com/crate-crypto/rust-verkle'
+publishing {
+ publications {
+ maven(MavenPublication) {
+ from components.java
+ pom {
+ name = 'Verkle Cryptography Library'
+ description = 'A library for working with Cryptography used in verkle trie'
+ url = 'https://github.com/crate-crypto/rust-verkle'
+ licenses {
+ license {
+ name = 'The Apache License, Version 2.0'
+ url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+ }
+ }
+ developers {
+ developer {
+ id = 'kevthedev'
+ name = 'Kevaundray Wedderburn'
+ email = 'kev@the.dev'
+ }
+ }
+ scm {
+ connection = 'scm:git:git://github.com/crate-crypto/rust-verkle.git'
+ developerConnection = 'scm:git:ssh://github.com:crate-crypto/rust-verkle.git'
+ url = 'https://github.com/crate-crypto/rust-verkle'
+ }
+ }
}
- }
}
- }
-
- repositories {
- maven {
- url = layout.buildDirectory.dir('staging-deploy')
+ repositories {
+ maven {
+ url = layout.buildDirectory.dir('staging-deploy')
+ }
}
- }
}
-
+
jreleaser {
- // Jreleaser will look for .git and its at the top level repository
- gitRootSearch = true
- release {
- // Skip releases as this is handled by release-please
- github {
- skipRelease = true
+ gitRootSearch = true
+ release {
+ github {
+ skipRelease = true
+ }
+ }
+ signing {
+ active = 'ALWAYS'
+ armored = true
}
- }
- signing {
- active = 'ALWAYS'
- armored = true
- }
- deploy {
- maven {
- mavenCentral {
- sonatype {
- active = 'ALWAYS'
- url = 'https://central.sonatype.com/api/v1/publisher'
- stagingRepository('build/staging-deploy')
+ deploy {
+ maven {
+ mavenCentral {
+ sonatype {
+ active = 'ALWAYS'
+ url = 'https://central.sonatype.com/api/v1/publisher'
+ stagingRepository('build/staging-deploy')
+ }
+ }
}
- }
}
- }
}
-// JReleaser does not create this directory, so we manually create it
+// JReleaser does not create this directory, so we manually create it
// ourselves.
tasks.register('createJReleaserOutputDir') {
- doLast {
- mkdir 'build/jreleaser'
- }
+ doLast {
+ mkdir 'build/jreleaser'
+ }
}
tasks.named('jreleaserFullRelease') {
- dependsOn 'createJReleaserOutputDir'
-}
\ No newline at end of file
+ dependsOn 'createJReleaserOutputDir'
+}
diff --git a/bindings/java/java_code/settings.gradle b/bindings/java/java_code/settings.gradle
index 5d2bc08..9e3e617 100644
--- a/bindings/java/java_code/settings.gradle
+++ b/bindings/java/java_code/settings.gradle
@@ -1,2 +1 @@
rootProject.name = 'java-verkle-cryptography'
-
diff --git a/bindings/java/java_code/src/main/java/verkle/cryptography/LibIpaMultipoint.java b/bindings/java/java_code/src/main/java/verkle/cryptography/LibIpaMultipoint.java
index 63eb607..5726162 100644
--- a/bindings/java/java_code/src/main/java/verkle/cryptography/LibIpaMultipoint.java
+++ b/bindings/java/java_code/src/main/java/verkle/cryptography/LibIpaMultipoint.java
@@ -28,8 +28,8 @@
/**
* Java interface to ipa-multipoint, a rust library that supports computing polynomial commitments.
*
- * The library relies on the bandersnatch curve described at https://eprint.iacr.org/2021/1152.pdf.
- *
+ * <p>The library relies on the bandersnatch curve described at
+ * https://eprint.iacr.org/2021/1152.pdf.
*/
public class LibIpaMultipoint {
@@ -49,7 +49,7 @@ public class LibIpaMultipoint {
* @return uncompressed serialised commitment.
*/
public static native byte[] commit(byte[] values);
-
+
/**
* Adds two commitments together.
*
@@ -76,12 +76,13 @@ public class LibIpaMultipoint {
* @param newValues new serialised scalars.
* @return uncompressed serialised commitment.
*/
- public static native byte[] updateSparse(byte[] commitment, byte[] indices, byte[] oldValues, byte[] newValues);
+ public static native byte[] updateSparse(
+ byte[] commitment, byte[] indices, byte[] oldValues, byte[] newValues);
/**
* Compresses a commitment.
*
- * Converts a serialised commitment from uncompressed to compressed form.
+   * <p>Converts a serialised commitment from uncompressed to compressed form.
*
* @param commitment uncompressed serialised commitment.
* @return compressed serialised commitment.
@@ -91,7 +92,7 @@ public class LibIpaMultipoint {
/**
* Compresses many commitments.
*
- * Converts a serialised commitment from uncompressed to compressed form.
+   * <p>Converts a serialised commitment from uncompressed to compressed form.
*
* @param commitments uncompressed serialised commitments.
* @return compressed serialised commitments.
@@ -109,8 +110,8 @@ public class LibIpaMultipoint {
/**
* Map a vector of commitments to its corresponding vector of scalars.
*
- * The vectorised version is highly optimised, making use of Montgoméry's batch
- * inversion trick.
+   * <p>The vectorised version is highly optimised, making use of Montgoméry's batch inversion
+ * trick.
*
* @param commitments uncompressed serialised commitments
* @return serialised scalars
@@ -119,10 +120,9 @@ public class LibIpaMultipoint {
/**
* Verifies the Verkle proof against the specified pre-state root
- *
- * This method interfaces with a native Rust implementation to verify a Verkle proof
- * against the specified pre-state root.
- *
+ *
+ * This method interfaces with a native Rust implementation to verify a Verkle proof against
+ * the specified pre-state root.
*
* @param keys accessed or modified keys
* @param currentValues current values associated with the keys.
@@ -136,102 +136,103 @@ public class LibIpaMultipoint {
* @param prestateRoot root of the prestate to be verified against.
* @return true if prestate root is correct
*/
- public static native boolean verifyPreStateRoot(byte[][] keys,
- byte[][] currentValues,
- byte[][] commitmentsByPath,
- byte[][] cl,
- byte[][] cr,
- byte[][] otherStems,
- byte[] d,
- byte[] depthsExtensionPresentStems,
- byte[] finalEvaluation,
- byte[] prestateRoot);
-
- // TODO:Replace the code below with jna.Native
-
- private static final String LIBRARY_NAME = "java_verkle_cryptography";
-
- private static String getNormalizedArchitecture() {
- String osArch = System.getProperty("os.arch").toLowerCase();
- if (osArch.equals("x86_64") || osArch.equals("amd64")) {
- return "x86_64";
- } else if (osArch.equals("aarch64") || osArch.equals("arm64")) {
- return "aarch64";
- } else {
- return osArch;
- }
+ public static native boolean verifyPreStateRoot(
+ byte[][] keys,
+ byte[][] currentValues,
+ byte[][] commitmentsByPath,
+ byte[][] cl,
+ byte[][] cr,
+ byte[][] otherStems,
+ byte[] d,
+ byte[] depthsExtensionPresentStems,
+ byte[] finalEvaluation,
+ byte[] prestateRoot);
+
+  // TODO: Replace the code below with jna.Native
+
+ private static final String LIBRARY_NAME = "java_verkle_cryptography";
+
+ private static String getNormalizedArchitecture() {
+ String osArch = System.getProperty("os.arch").toLowerCase();
+ if (osArch.equals("x86_64") || osArch.equals("amd64")) {
+ return "x86_64";
+ } else if (osArch.equals("aarch64") || osArch.equals("arm64")) {
+ return "aarch64";
+ } else {
+ return osArch;
}
-
- private static void ensureLibraryLoaded() {
- if (!ENABLED) {
- synchronized (libraryLock) {
- if (!ENABLED) {
- loadNativeLibrary();
- ENABLED = true;
- }
+ }
+
+ private static void ensureLibraryLoaded() {
+ if (!ENABLED) {
+ synchronized (libraryLock) {
+ if (!ENABLED) {
+ loadNativeLibrary();
+ ENABLED = true;
}
}
}
+ }
- /** Loads the appropriate native library based on your platform. */
- private static void loadNativeLibrary() {
- String PLATFORM_NATIVE_LIBRARY_NAME = System.mapLibraryName(LIBRARY_NAME);
-
- String osName = System.getProperty("os.name").toLowerCase();
- String osArch = getNormalizedArchitecture();
- String libraryResourcePath = null;
-
- if (osName.contains("win")) {
- if (osArch.contains("x86_64")) {
- libraryResourcePath = "/x86_64-pc-windows-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
- } else if (osArch.contains("x86")) {
- // We do not support 32 bit windows
- } else if (osArch.contains("aarch64")) {
- // We currently do not support arm on windows
- }
- } else if (osName.contains("mac")) {
- if (osArch.contains("x86_64")) {
- libraryResourcePath = "/x86_64-apple-darwin/" + PLATFORM_NATIVE_LIBRARY_NAME;
- } else if (osArch.contains("aarch64")) {
- libraryResourcePath = "/aarch64-apple-darwin/" + PLATFORM_NATIVE_LIBRARY_NAME;
- }
- } else if (osName.contains("linux")) {
- if (osArch.contains("x86_64")) {
- libraryResourcePath = "/x86_64-unknown-linux-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
- } else if (osArch.contains("aarch64")) {
- libraryResourcePath = "/aarch64-unknown-linux-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
- }
- }
-
- if (libraryResourcePath == null) {
- throw new UnsupportedOperationException("Unsupported OS or architecture: " + osName + ", " + osArch);
- }
+ /** Loads the appropriate native library based on your platform. */
+ private static void loadNativeLibrary() {
+ String PLATFORM_NATIVE_LIBRARY_NAME = System.mapLibraryName(LIBRARY_NAME);
+
+ String osName = System.getProperty("os.name").toLowerCase();
+ String osArch = getNormalizedArchitecture();
+ String libraryResourcePath = null;
+
+ if (osName.contains("win")) {
+ if (osArch.contains("x86_64")) {
+ libraryResourcePath = "/x86_64-pc-windows-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
+ } else if (osArch.contains("x86")) {
+ // We do not support 32 bit windows
+ } else if (osArch.contains("aarch64")) {
+ // We currently do not support arm on windows
+ }
+ } else if (osName.contains("mac")) {
+ if (osArch.contains("x86_64")) {
+ libraryResourcePath = "/x86_64-apple-darwin/" + PLATFORM_NATIVE_LIBRARY_NAME;
+ } else if (osArch.contains("aarch64")) {
+ libraryResourcePath = "/aarch64-apple-darwin/" + PLATFORM_NATIVE_LIBRARY_NAME;
+ }
+ } else if (osName.contains("linux")) {
+ if (osArch.contains("x86_64")) {
+ libraryResourcePath = "/x86_64-unknown-linux-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
+ } else if (osArch.contains("aarch64")) {
+ libraryResourcePath = "/aarch64-unknown-linux-gnu/" + PLATFORM_NATIVE_LIBRARY_NAME;
+ }
+ }
- InputStream libraryResource = LibIpaMultipoint.class.getResourceAsStream(libraryResourcePath);
-
- if (libraryResource == null) {
- try {
- System.loadLibrary(LIBRARY_NAME);
- } catch (UnsatisfiedLinkError __) {
- String exceptionMessage =
- String.format(
- "Couldn't load native library (%s). It wasn't available at %s or the library path.",
- LIBRARY_NAME, libraryResourcePath);
- throw new RuntimeException(exceptionMessage);
- }
- } else {
- try {
- Path tempDir = Files.createTempDirectory(LIBRARY_NAME + "@");
- tempDir.toFile().deleteOnExit();
- Path tempDll = tempDir.resolve(PLATFORM_NATIVE_LIBRARY_NAME);
- tempDll.toFile().deleteOnExit();
- Files.copy(libraryResource, tempDll, StandardCopyOption.REPLACE_EXISTING);
- libraryResource.close();
- System.load(tempDll.toString());
- } catch (IOException ex) {
- throw new UncheckedIOException(ex);
- }
- }
+ if (libraryResourcePath == null) {
+ throw new UnsupportedOperationException(
+ "Unsupported OS or architecture: " + osName + ", " + osArch);
}
+ InputStream libraryResource = LibIpaMultipoint.class.getResourceAsStream(libraryResourcePath);
+
+ if (libraryResource == null) {
+ try {
+ System.loadLibrary(LIBRARY_NAME);
+ } catch (UnsatisfiedLinkError __) {
+ String exceptionMessage =
+ String.format(
+ "Couldn't load native library (%s). It wasn't available at %s or the library path.",
+ LIBRARY_NAME, libraryResourcePath);
+ throw new RuntimeException(exceptionMessage);
+ }
+ } else {
+ try {
+ Path tempDir = Files.createTempDirectory(LIBRARY_NAME + "@");
+ tempDir.toFile().deleteOnExit();
+ Path tempDll = tempDir.resolve(PLATFORM_NATIVE_LIBRARY_NAME);
+ tempDll.toFile().deleteOnExit();
+ Files.copy(libraryResource, tempDll, StandardCopyOption.REPLACE_EXISTING);
+ libraryResource.close();
+ System.load(tempDll.toString());
+ } catch (IOException ex) {
+ throw new UncheckedIOException(ex);
+ }
+ }
+ }
}
diff --git a/bindings/java/java_code/src/test/java/verkle/cryptography/CommitRootTest.java b/bindings/java/java_code/src/test/java/verkle/cryptography/CommitRootTest.java
index 18d3f57..6b54708 100644
--- a/bindings/java/java_code/src/test/java/verkle/cryptography/CommitRootTest.java
+++ b/bindings/java/java_code/src/test/java/verkle/cryptography/CommitRootTest.java
@@ -15,47 +15,44 @@
*/
package verkle.cryptography;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
import static org.assertj.core.api.Assertions.*;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
-
-import org.apache.tuweni.bytes.Bytes;
-import org.apache.tuweni.bytes.Bytes32;
-import verkle.cryptography.LibIpaMultipoint;
-
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
+import org.apache.tuweni.bytes.Bytes;
+import org.apache.tuweni.bytes.Bytes32;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
public class CommitRootTest {
- private static final ObjectMapper objectMapper = new ObjectMapper();
-
-    public static List<TestData> JsonData() throws IOException {
- InputStream inputStream = PedersenCommitmentTest.class.getResourceAsStream("/commit_root_test.json");
-        return objectMapper.readValue(inputStream, new TypeReference<List<TestData>>() {
- });
- }
-
- static class TestData {
-        public ArrayList<String> frs;
- public String expected;
- }
-
- @ParameterizedTest
- @MethodSource("JsonData")
- public void TestPolynomialCommitments(TestData testData) {
-        List<Bytes> FrBytes = new ArrayList<>();
- for (int i = 0; i < 256; i++) {
- Bytes32 value = Bytes32.fromHexString(testData.frs.get(i));
- FrBytes.add(value.reverse());
- }
- byte[] input = Bytes.concatenate(FrBytes).toArray();
- Bytes32 result = Bytes32.wrap(LibIpaMultipoint.commitAsCompressed(input));
- Bytes32 expected = Bytes32.fromHexString(testData.expected);
- assertThat(result).isEqualTo(expected);
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+  public static List<TestData> JsonData() throws IOException {
+ InputStream inputStream =
+ PedersenCommitmentTest.class.getResourceAsStream("/commit_root_test.json");
+    return objectMapper.readValue(inputStream, new TypeReference<List<TestData>>() {});
+ }
+
+ static class TestData {
+    public ArrayList<String> frs;
+ public String expected;
+ }
+
+ @ParameterizedTest
+ @MethodSource("JsonData")
+ public void TestPolynomialCommitments(TestData testData) {
+    List<Bytes> FrBytes = new ArrayList<>();
+ for (int i = 0; i < 256; i++) {
+ Bytes32 value = Bytes32.fromHexString(testData.frs.get(i));
+ FrBytes.add(value.reverse());
}
+ byte[] input = Bytes.concatenate(FrBytes).toArray();
+ Bytes32 result = Bytes32.wrap(LibIpaMultipoint.commitAsCompressed(input));
+ Bytes32 expected = Bytes32.fromHexString(testData.expected);
+ assertThat(result).isEqualTo(expected);
+ }
}
diff --git a/bindings/java/java_code/src/test/java/verkle/cryptography/LibIpaMultipointTest.java b/bindings/java/java_code/src/test/java/verkle/cryptography/LibIpaMultipointTest.java
index 091e865..0a718e0 100644
--- a/bindings/java/java_code/src/test/java/verkle/cryptography/LibIpaMultipointTest.java
+++ b/bindings/java/java_code/src/test/java/verkle/cryptography/LibIpaMultipointTest.java
@@ -15,176 +15,215 @@
*/
package verkle.cryptography;
-import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.*;
import java.math.BigInteger;
-
import org.apache.tuweni.bytes.Bytes;
import org.apache.tuweni.bytes.Bytes32;
-import verkle.cryptography.LibIpaMultipoint;
+import org.junit.jupiter.api.Test;
public class LibIpaMultipointTest {
- @Test
- public void testCallLibrary() {
- Bytes32 input = Bytes32.fromHexString("0x0000fe0c00000000000000000000000000000000000000000000000000000000");
- Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
- Bytes expected = Bytes.fromHexString("0x0c7f8df856f6860c9f2c6cb0f86c10228e511cca1c4a08263189d629940cb189706cbaa63c436901b6355e10a524337d97688fa5b0cf6b2b91b98e654547f728");
- assertThat(result).isEqualTo(expected.reverse());
+ @Test
+ public void testCallLibrary() {
+ Bytes32 input =
+ Bytes32.fromHexString("0x0000fe0c00000000000000000000000000000000000000000000000000000000");
+ Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
+ Bytes expected =
+ Bytes.fromHexString(
+ "0x0c7f8df856f6860c9f2c6cb0f86c10228e511cca1c4a08263189d629940cb189706cbaa63c436901b6355e10a524337d97688fa5b0cf6b2b91b98e654547f728");
+ assertThat(result).isEqualTo(expected.reverse());
+ }
+
+ @Test
+ public void testCallLibraryCommitRoot() {
+ Bytes32 input =
+ Bytes32.fromHexString("0x59d039a350f2f9c751a97ee39dd16235d410ac6945d2fd480b395a567a1fe300");
+ // Bytes32 result = Bytes32.wrap(LibIpaMultipoint.commitAsCompressed(input.toArray()));
+ Bytes32 result =
+ Bytes32.wrap(LibIpaMultipoint.compress(LibIpaMultipoint.commit(input.toArray())));
+ Bytes32 expected =
+ Bytes32.fromHexString("0x3337896554fd3960bef9a4d0ff658ee8ee470cf9ca88a3c807cbe128536c5c05");
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ public void testCallLibraryWithManyElements() {
+ Bytes32 element =
+ Bytes32.fromHexString("0x00ecc7e76c11ad699e887f96bff372b308836c14e22279c81265fb4130fe0c00");
+ Bytes32[] arr = new Bytes32[128];
+ for (int i = 0; i < 128; i++) {
+ arr[i] = element;
}
-
- @Test
- public void testCallLibraryCommitRoot() {
- Bytes32 input = Bytes32.fromHexString("0x59d039a350f2f9c751a97ee39dd16235d410ac6945d2fd480b395a567a1fe300");
- // Bytes32 result = Bytes32.wrap(LibIpaMultipoint.commitAsCompressed(input.toArray()));
- Bytes32 result = Bytes32.wrap(LibIpaMultipoint.compress(LibIpaMultipoint.commit(input.toArray())));
- Bytes32 expected = Bytes32.fromHexString("0x3337896554fd3960bef9a4d0ff658ee8ee470cf9ca88a3c807cbe128536c5c05");
- assertThat(result).isEqualTo(expected);
+ Bytes input = Bytes.concatenate(arr);
+ Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
+ Bytes expected =
+ Bytes.fromHexString(
+ "0x0128b513cfb016d3d836b5fa4a8a1260395d4ca831d65027aa74b832d92e0d6d9beb8d5e42b78b99e4eb233e7eca6276c6f4bd235b35c091546e2a2119bc1455");
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ public void testCallLibraryWithMaxElements() {
+ Bytes32 element =
+ Bytes32.fromHexString("0x5b04e049425e6cfee43ddb1d8d57e44dd0fe8eff862125d907f6747f56206f00");
+ Bytes32[] arr = new Bytes32[256];
+ for (int i = 0; i < 256; i++) {
+ arr[i] = element;
}
+ Bytes input = Bytes.concatenate(arr);
+ Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
+ Bytes expected =
+ Bytes.fromHexString(
+ "0xcfb8d6fe536dec3d72ae549a0b58c7d2d119e7dd58adb2663369275307cd5a1f8adafed4044dbdc9ba9fb4f7ea0e44ab14c1c47297633015d175d7dcaffeb843");
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ public void testUpdateCommitmentSparseIdentityCommitment() {
+ // Numbers and result is taken from:
+ // https://github.com/crate-crypto/rust-verkle/blob/bb5af2f2fe9788d49d2896b9614a3125f8227818/ffi_interface/src/lib.rs#L576
+ // Identity element
+ Bytes oldCommitment =
+ Bytes.fromHexString(
+ "0x00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000");
+
+ Bytes oldScalar1 =
+ Bytes.fromHexString("0x0200000000000000000000000000000000000000000000000000000000000000");
+ Bytes newScalar1 =
+ Bytes.fromHexString("0x1300000000000000000000000000000000000000000000000000000000000000");
+ Bytes index1 = Bytes.fromHexString("0x07");
+
+ Bytes oldScalar2 =
+ Bytes.fromHexString("0x0200000000000000000000000000000000000000000000000000000000000000");
+ Bytes newScalar2 =
+ Bytes.fromHexString("0x1100000000000000000000000000000000000000000000000000000000000000");
+ Bytes index2 = Bytes.fromHexString("0x08");
+
+ Bytes indices = Bytes.concatenate(index1, index2);
+ Bytes oldScalars = Bytes.concatenate(oldScalar1, oldScalar2);
+ Bytes newScalars = Bytes.concatenate(newScalar1, newScalar2);
+
+ Bytes result =
+ Bytes.of(
+ LibIpaMultipoint.updateSparse(
+ oldCommitment.toArray(), indices.toArray(),
+ oldScalars.toArray(), newScalars.toArray()));
+ assertThat(result)
+ .isEqualTo(
+ Bytes.fromHexString(
+ "6cf7264f1fff79a21b1be098e66e2457f2cba14c36c33a794566f85be8e6c61dc2a29760223e7c568af4ca13a08535d3e66ba7e2dd1e053894f1fdccdc560a54"));
+ }
+
+ @Test
+ public void testUpdateCommitmentSparseNonIdentityCommitment() {
+ // These values are taken from:
+ // https://github.com/crate-crypto/rust-verkle/blob/bb5af2f2fe9788d49d2896b9614a3125f8227818/ffi_interface/src/lib.rs#L494
+ Bytes oldCommitment =
+ Bytes.fromHexString(
+ "c2a169fe13aab966d6642801727c8534e40b355372890e18a9880f66b88e143a37fe18000aaf81d4536b64ec3266678c56baf81645d4cfd5133a908247ab8445");
+ Bytes oldScalar1 =
+ Bytes.fromHexString("0x0400000000000000000000000000000000000000000000000000000000000000");
+ Bytes newScalar1 =
+ Bytes.fromHexString("0x7f00000000000000000000000000000000000000000000000000000000000000");
+ Bytes index1 = Bytes.fromHexString("0x01");
+
+ Bytes oldScalar2 =
+ Bytes.fromHexString("0x0900000000000000000000000000000000000000000000000000000000000000");
+ Bytes newScalar2 =
+ Bytes.fromHexString("0xff00000000000000000000000000000000000000000000000000000000000000");
+ Bytes index2 = Bytes.fromHexString("0x02");
+
+ Bytes indices = Bytes.concatenate(index1, index2);
+ Bytes oldScalars = Bytes.concatenate(oldScalar1, oldScalar2);
+ Bytes newScalars = Bytes.concatenate(newScalar1, newScalar2);
+
+ Bytes result =
+ Bytes.of(
+ LibIpaMultipoint.updateSparse(
+ oldCommitment.toArray(), indices.toArray(),
+ oldScalars.toArray(), newScalars.toArray()));
+ assertThat(result)
+ .isEqualTo(
+ Bytes.fromHexString(
+ "2dd3bb69da79ecd91a74b188bfddc74827a995dec07e5308f8215f08d69e77330b11628c6d3313a7781b74850e64cb6ac706290da79e56ff311a10214d14dc36"));
+ }
+
+ @Test
+ public void testAddCommitment() {
+ // Taken from `smoke_test_add_commitment_fixed` in ffi_interface
+
+ Bytes lhs =
+ Bytes.fromHexString(
+ "0x0ff070a99e9f38e4f1ec1db91795ef4942fcd188152562c2773d9125236a50444687ab68507977d6276428d7d570a3c95efa406427f6641ba1e247133d17e030");
+ Bytes rhs =
+ Bytes.fromHexString(
+ "0x333e05d05e6533e993f519c23dbce6205fb9e0b78f38b3336d9c4296f144cb0204c389bb5e6925157ce16eda2ebf45640956be98e2be2df77a86f0bca135da21");
+ Bytes expected =
+ Bytes.fromHexString(
+ "0x8b5feb2eb0cc73a8ca2f24ae7b2c61e88ff0b019dea9b881d1b5f7815280b6393834cb80ab2c09984c5b9f70be680206a6e12c8bbb169fe5ab672f45c5d51e20");
+
+ Bytes result = Bytes.of(LibIpaMultipoint.addCommitment(lhs.toArray(), rhs.toArray()));
+
+ assertThat(result).isEqualTo(expected);
+ }
+
+ @Test
+ public void testGetTreeKeySubIndex0() {
+ // Taken from "get_tree_key_add_commitment_equivalence" test in rust ffi_interface
+ // code.
+ BigInteger[] chunkedInput =
+ new BigInteger[] {
+ BigInteger.valueOf(16386),
+ new BigInteger("21345817372864405881847059188222722561"),
+ new BigInteger("42696867846335054569745073772176806417"),
+ new BigInteger("65392825175610104412738625059090743104"),
+ new BigInteger("44041774702139455724840610475136659248")
+ };
- @Test
- public void testCallLibraryWithManyElements() {
- Bytes32 element = Bytes32.fromHexString("0x00ecc7e76c11ad699e887f96bff372b308836c14e22279c81265fb4130fe0c00");
- Bytes32[] arr = new Bytes32[128];
- for (int i = 0; i < 128; i++) {
- arr[i] = element;
- }
- Bytes input = Bytes.concatenate(arr);
- Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
- Bytes expected = Bytes.fromHexString("0x0128b513cfb016d3d836b5fa4a8a1260395d4ca831d65027aa74b832d92e0d6d9beb8d5e42b78b99e4eb233e7eca6276c6f4bd235b35c091546e2a2119bc1455");
- assertThat(result).isEqualTo(expected);
- }
+ Bytes expectedHashForSubIndex0 =
+ Bytes.fromHexString("ff7e3916badeb510dfcdad458726273319280742e553d8d229bd676428147300");
- @Test
- public void testCallLibraryWithMaxElements() {
- Bytes32 element = Bytes32.fromHexString("0x5b04e049425e6cfee43ddb1d8d57e44dd0fe8eff862125d907f6747f56206f00");
- Bytes32[] arr = new Bytes32[256];
- for (int i = 0; i < 256; i++) {
- arr[i] = element;
- }
- Bytes input = Bytes.concatenate(arr);
- Bytes result = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
- Bytes expected = Bytes.fromHexString("0xcfb8d6fe536dec3d72ae549a0b58c7d2d119e7dd58adb2663369275307cd5a1f8adafed4044dbdc9ba9fb4f7ea0e44ab14c1c47297633015d175d7dcaffeb843");
- assertThat(result).isEqualTo(expected);
- }
+ Bytes32 marker = toBytes32LE(chunkedInput[0]);
+ Bytes32 addressLow = toBytes32LE(chunkedInput[1]);
+ Bytes32 addressHigh = toBytes32LE(chunkedInput[2]);
+ Bytes32 treeIndexLow = toBytes32LE(chunkedInput[3]);
+ Bytes32 treeIndexHigh = toBytes32LE(chunkedInput[4]);
- @Test
- public void testUpdateCommitmentSparseIdentityCommitment() {
- // Numbers and result is taken from: https://github.com/crate-crypto/rust-verkle/blob/bb5af2f2fe9788d49d2896b9614a3125f8227818/ffi_interface/src/lib.rs#L576
- // Identity element
- Bytes oldCommitment = Bytes.fromHexString("0x00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000");
-
- Bytes oldScalar1 = Bytes.fromHexString("0x0200000000000000000000000000000000000000000000000000000000000000");
- Bytes newScalar1 = Bytes.fromHexString("0x1300000000000000000000000000000000000000000000000000000000000000");
- Bytes index1 = Bytes.fromHexString("0x07");
-
- Bytes oldScalar2 = Bytes.fromHexString("0x0200000000000000000000000000000000000000000000000000000000000000");
- Bytes newScalar2 = Bytes.fromHexString("0x1100000000000000000000000000000000000000000000000000000000000000");
- Bytes index2 = Bytes.fromHexString("0x08");
-
- Bytes indices = Bytes.concatenate(index1, index2);
- Bytes oldScalars = Bytes.concatenate(oldScalar1, oldScalar2);
- Bytes newScalars = Bytes.concatenate(newScalar1, newScalar2);
-
- Bytes result = Bytes.of(LibIpaMultipoint.updateSparse(
- oldCommitment.toArray(), indices.toArray(),
- oldScalars.toArray(), newScalars.toArray()
- ));
- assertThat(result).isEqualTo(Bytes.fromHexString("6cf7264f1fff79a21b1be098e66e2457f2cba14c36c33a794566f85be8e6c61dc2a29760223e7c568af4ca13a08535d3e66ba7e2dd1e053894f1fdccdc560a54"));
- }
+ Bytes address = Bytes.concatenate(addressLow, addressHigh);
+ Bytes addressWithMarker = Bytes.concatenate(marker, address);
+ Bytes addressCached = Bytes.of(LibIpaMultipoint.commit(addressWithMarker.toArray()));
- @Test
- public void testUpdateCommitmentSparseNonIdentityCommitment() {
- // These values are taken from: https://github.com/crate-crypto/rust-verkle/blob/bb5af2f2fe9788d49d2896b9614a3125f8227818/ffi_interface/src/lib.rs#L494
- Bytes oldCommitment = Bytes.fromHexString(
- "c2a169fe13aab966d6642801727c8534e40b355372890e18a9880f66b88e143a37fe18000aaf81d4536b64ec3266678c56baf81645d4cfd5133a908247ab8445");
- Bytes oldScalar1 = Bytes.fromHexString("0x0400000000000000000000000000000000000000000000000000000000000000");
- Bytes newScalar1 = Bytes.fromHexString("0x7f00000000000000000000000000000000000000000000000000000000000000");
- Bytes index1 = Bytes.fromHexString("0x01");
+ Bytes32 zero =
+ Bytes32.fromHexString("0x0000000000000000000000000000000000000000000000000000000000000000");
+ Bytes32[] treeIndex = new Bytes32[5];
+ treeIndex[0] = zero;
+ treeIndex[1] = zero;
+ treeIndex[2] = zero;
+ treeIndex[3] = treeIndexLow;
+ treeIndex[4] = treeIndexHigh;
+ Bytes input = Bytes.concatenate(treeIndex);
- Bytes oldScalar2 = Bytes.fromHexString("0x0900000000000000000000000000000000000000000000000000000000000000");
- Bytes newScalar2 = Bytes.fromHexString("0xff00000000000000000000000000000000000000000000000000000000000000");
- Bytes index2 = Bytes.fromHexString("0x02");
+ Bytes treeIndexCommit = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
- Bytes indices = Bytes.concatenate(index1, index2);
- Bytes oldScalars = Bytes.concatenate(oldScalar1, oldScalar2);
- Bytes newScalars = Bytes.concatenate(newScalar1, newScalar2);
+ byte[] committedPoint =
+ LibIpaMultipoint.addCommitment(addressCached.toArray(), treeIndexCommit.toArray());
- Bytes result = Bytes.of(LibIpaMultipoint.updateSparse(
- oldCommitment.toArray(), indices.toArray(),
- oldScalars.toArray(), newScalars.toArray()));
- assertThat(result).isEqualTo(Bytes.fromHexString(
- "2dd3bb69da79ecd91a74b188bfddc74827a995dec07e5308f8215f08d69e77330b11628c6d3313a7781b74850e64cb6ac706290da79e56ff311a10214d14dc36"));
+ byte[] key = LibIpaMultipoint.hash(committedPoint);
+ key[31] = 0; // modify the last byte to simulate get_tree_key using sub_index=0
- }
-
- @Test
- public void testAddCommitment() {
- // Taken from `smoke_test_add_commitment_fixed` in ffi_interface
-
- Bytes lhs = Bytes.fromHexString("0x0ff070a99e9f38e4f1ec1db91795ef4942fcd188152562c2773d9125236a50444687ab68507977d6276428d7d570a3c95efa406427f6641ba1e247133d17e030");
- Bytes rhs = Bytes.fromHexString("0x333e05d05e6533e993f519c23dbce6205fb9e0b78f38b3336d9c4296f144cb0204c389bb5e6925157ce16eda2ebf45640956be98e2be2df77a86f0bca135da21");
- Bytes expected = Bytes.fromHexString(
- "0x8b5feb2eb0cc73a8ca2f24ae7b2c61e88ff0b019dea9b881d1b5f7815280b6393834cb80ab2c09984c5b9f70be680206a6e12c8bbb169fe5ab672f45c5d51e20");
-
- Bytes result = Bytes.of(LibIpaMultipoint.addCommitment(lhs.toArray(), rhs.toArray()));
-
- assertThat(result).isEqualTo(expected);
- }
+ assertThat(Bytes.of(key)).isEqualTo(expectedHashForSubIndex0);
+ }
- @Test
- public void testGetTreeKeySubIndex0() {
- // Taken from "get_tree_key_add_commitment_equivalence" test in rust ffi_interface
- // code.
- BigInteger[] chunkedInput = new BigInteger[] {
- BigInteger.valueOf(16386),
- new BigInteger("21345817372864405881847059188222722561"),
- new BigInteger("42696867846335054569745073772176806417"),
- new BigInteger("65392825175610104412738625059090743104"),
- new BigInteger("44041774702139455724840610475136659248")
- };
+ private static Bytes32 toBytes32LE(BigInteger value) {
+ byte[] bytes = new byte[32];
+ byte[] valueBytes = value.toByteArray();
- Bytes expectedHashForSubIndex0 = Bytes.fromHexString("ff7e3916badeb510dfcdad458726273319280742e553d8d229bd676428147300");
-
- Bytes32 marker = toBytes32LE(chunkedInput[0]);
- Bytes32 addressLow = toBytes32LE(chunkedInput[1]);
- Bytes32 addressHigh = toBytes32LE(chunkedInput[2]);
- Bytes32 treeIndexLow = toBytes32LE(chunkedInput[3]);
- Bytes32 treeIndexHigh = toBytes32LE(chunkedInput[4]);
-
- Bytes address = Bytes.concatenate(addressLow, addressHigh);
- Bytes addressWithMarker = Bytes.concatenate(marker, address);
- Bytes addressCached = Bytes.of(LibIpaMultipoint.commit(addressWithMarker.toArray()));
-
- Bytes32 zero = Bytes32.fromHexString("0x0000000000000000000000000000000000000000000000000000000000000000");
- Bytes32[] treeIndex = new Bytes32[5];
- treeIndex[0] = zero;
- treeIndex[1] = zero;
- treeIndex[2] = zero;
- treeIndex[3] = treeIndexLow;
- treeIndex[4] = treeIndexHigh;
- Bytes input = Bytes.concatenate(treeIndex);
-
- Bytes treeIndexCommit = Bytes.wrap(LibIpaMultipoint.commit(input.toArray()));
-
- byte[] committedPoint = LibIpaMultipoint.addCommitment(addressCached.toArray(), treeIndexCommit.toArray());
-
- byte[] key = LibIpaMultipoint.hash(committedPoint);
- key[31] = 0; // modify the last byte to simulate get_tree_key using sub_index=0
-
- assertThat(Bytes.of(key)).isEqualTo(expectedHashForSubIndex0);
+ // Copy in reverse order directly into the target array
+ for (int i = 0; i < valueBytes.length; i++) {
+ bytes[i] = valueBytes[valueBytes.length - 1 - i];
}
- private static Bytes32 toBytes32LE(BigInteger value) {
- byte[] bytes = new byte[32];
- byte[] valueBytes = value.toByteArray();
-
- // Copy in reverse order directly into the target array
- for (int i = 0; i < valueBytes.length; i++) {
- bytes[i] = valueBytes[valueBytes.length - 1 - i];
- }
-
- return Bytes32.wrap(bytes);
- }
+ return Bytes32.wrap(bytes);
+ }
}
diff --git a/bindings/java/java_code/src/test/java/verkle/cryptography/PedersenCommitmentTest.java b/bindings/java/java_code/src/test/java/verkle/cryptography/PedersenCommitmentTest.java
index 65a748f..2c7ed5e 100644
--- a/bindings/java/java_code/src/test/java/verkle/cryptography/PedersenCommitmentTest.java
+++ b/bindings/java/java_code/src/test/java/verkle/cryptography/PedersenCommitmentTest.java
@@ -1,48 +1,43 @@
package verkle.cryptography;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.MethodSource;
import static org.assertj.core.api.Assertions.*;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
-
-import org.apache.tuweni.bytes.Bytes;
-import org.apache.tuweni.bytes.Bytes32;
-import verkle.cryptography.LibIpaMultipoint;
-
import java.io.IOException;
import java.io.InputStream;
-import java.math.BigInteger;
-import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
-
+import org.apache.tuweni.bytes.Bytes;
+import org.apache.tuweni.bytes.Bytes32;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
public class PedersenCommitmentTest {
- private static final ObjectMapper objectMapper = new ObjectMapper();
-
-    public static List<TestData> JsonData() throws IOException {
- InputStream inputStream = PedersenCommitmentTest.class.getResourceAsStream("/pedersen_commitment_test.json");
-        return objectMapper.readValue(inputStream, new TypeReference<List<TestData>>() {});
- }
-
- static class TestData {
-        public ArrayList<String> frs;
- public String commitment;
- }
-
- @ParameterizedTest
- @MethodSource("JsonData")
- public void TestPolynomialCommitments(TestData testData) {
-        List<Bytes> FrBytes = new ArrayList<>();
- for (int i = 0 ; i < 256; i++ ) {
- Bytes32 value = Bytes32.fromHexString(testData.frs.get(i));
- FrBytes.add(value);
- }
- byte[] input = Bytes.concatenate(FrBytes).toArray();
- Bytes result = Bytes.wrap(LibIpaMultipoint.hash(LibIpaMultipoint.commit(input)));
- Bytes expected = Bytes.fromHexString(testData.commitment);
- assertThat(result).isEqualTo(expected);
+ private static final ObjectMapper objectMapper = new ObjectMapper();
+
+  public static List<TestData> JsonData() throws IOException {
+ InputStream inputStream =
+ PedersenCommitmentTest.class.getResourceAsStream("/pedersen_commitment_test.json");
+    return objectMapper.readValue(inputStream, new TypeReference<List<TestData>>() {});
+ }
+
+ static class TestData {
+    public ArrayList<String> frs;
+ public String commitment;
+ }
+
+ @ParameterizedTest
+ @MethodSource("JsonData")
+ public void TestPolynomialCommitments(TestData testData) {
+    List<Bytes> FrBytes = new ArrayList<>();
+ for (int i = 0; i < 256; i++) {
+ Bytes32 value = Bytes32.fromHexString(testData.frs.get(i));
+ FrBytes.add(value);
}
+ byte[] input = Bytes.concatenate(FrBytes).toArray();
+ Bytes result = Bytes.wrap(LibIpaMultipoint.hash(LibIpaMultipoint.commit(input)));
+ Bytes expected = Bytes.fromHexString(testData.commitment);
+ assertThat(result).isEqualTo(expected);
+ }
}
diff --git a/bindings/java/java_code/src/test/java/verkle/cryptography/proof/ExecutionWitnessData.java b/bindings/java/java_code/src/test/java/verkle/cryptography/proof/ExecutionWitnessData.java
index 9051c26..c0189b7 100644
--- a/bindings/java/java_code/src/test/java/verkle/cryptography/proof/ExecutionWitnessData.java
+++ b/bindings/java/java_code/src/test/java/verkle/cryptography/proof/ExecutionWitnessData.java
@@ -16,86 +16,85 @@
package verkle.cryptography.proof;
import com.fasterxml.jackson.annotation.JsonProperty;
-
import java.util.List;
-
public class ExecutionWitnessData {
- @JsonProperty("header")
- public Header header;
- @JsonProperty("executionWitness")
- public ExecutionWitness executionWitness;
+ @JsonProperty("header")
+ public Header header;
+
+ @JsonProperty("executionWitness")
+ public ExecutionWitness executionWitness;
+ static class Header {
- static class Header {
+ @JsonProperty("blockNumber")
+ public String blockNumber;
- @JsonProperty("blockNumber")
- public String blockNumber;
- @JsonProperty("parentHash")
- public String parentHash;
- @JsonProperty("stateRoot")
- public String stateRoot;
+ @JsonProperty("parentHash")
+ public String parentHash;
- }
+ @JsonProperty("stateRoot")
+ public String stateRoot;
+ }
+ static class ExecutionWitness {
- static class ExecutionWitness {
+ @JsonProperty("stateDiff")
+    public List<StateDiff> stateDiff;
- @JsonProperty("stateDiff")
-        public List<StateDiff> stateDiff;
- @JsonProperty("verkleProof")
- public VerkleProof verkleProof;
+ @JsonProperty("verkleProof")
+ public VerkleProof verkleProof;
+ }
- }
+ static class StateDiff {
+ @JsonProperty("stem")
+ public String stem;
- static class StateDiff {
+ @JsonProperty("suffixDiffs")
+    public List<SuffixDiff> suffixDiffs;
+ }
- @JsonProperty("stem")
- public String stem;
- @JsonProperty("suffixDiffs")
-        public List<SuffixDiff> suffixDiffs;
+ static class SuffixDiff {
- }
+ @JsonProperty("suffix")
+ public int suffix;
+ @JsonProperty("currentValue")
+ public String currentValue;
- static class SuffixDiff {
+ @JsonProperty("newValue")
+ public String newValue;
+ }
- @JsonProperty("suffix")
- public int suffix;
- @JsonProperty("currentValue")
- public String currentValue;
- @JsonProperty("newValue")
- public String newValue;
+ static class VerkleProof {
- }
+ @JsonProperty("otherStems")
+    public List<String> otherStems;
+ @JsonProperty("depthExtensionPresent")
+ public String depthExtensionPresent;
- static class VerkleProof {
+ @JsonProperty("commitmentsByPath")
+    public List<String> commitmentsByPath;
- @JsonProperty("otherStems")
-        public List<String> otherStems;
- @JsonProperty("depthExtensionPresent")
- public String depthExtensionPresent;
- @JsonProperty("commitmentsByPath")
-        public List<String> commitmentsByPath;
- @JsonProperty("d")
- public String d;
- @JsonProperty("ipaProof")
- public IpaProof ipaProof;
+ @JsonProperty("d")
+ public String d;
- }
+ @JsonProperty("ipaProof")
+ public IpaProof ipaProof;
+ }
+ static class IpaProof {
- static class IpaProof {
+ @JsonProperty("cl")
+    public List<String> cl;
- @JsonProperty("cl")
-        public List<String> cl;
- @JsonProperty("cr")
-        public List<String> cr;
- @JsonProperty("finalEvaluation")
- public String finalEvaluation;
+ @JsonProperty("cr")
+    public List<String> cr;
- }
-}
\ No newline at end of file
+ @JsonProperty("finalEvaluation")
+ public String finalEvaluation;
+ }
+}
diff --git a/bindings/java/java_code/src/test/java/verkle/cryptography/proof/VerifyProofTest.java b/bindings/java/java_code/src/test/java/verkle/cryptography/proof/VerifyProofTest.java
index 8b35358..9f903d0 100644
--- a/bindings/java/java_code/src/test/java/verkle/cryptography/proof/VerifyProofTest.java
+++ b/bindings/java/java_code/src/test/java/verkle/cryptography/proof/VerifyProofTest.java
@@ -15,76 +15,113 @@
*/
package verkle.cryptography.proof;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.tuweni.bytes.Bytes;
-import verkle.cryptography.LibIpaMultipoint;
-import org.junit.jupiter.params.ParameterizedTest;
-import org.junit.jupiter.params.provider.Arguments;
-import org.junit.jupiter.params.provider.MethodSource;
-
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Stream;
-
-import static org.assertj.core.api.Assertions.assertThat;
+import org.apache.tuweni.bytes.Bytes;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import verkle.cryptography.LibIpaMultipoint;
public class VerifyProofTest {
- private static final ObjectMapper objectMapper = new ObjectMapper();
+ private static final ObjectMapper objectMapper = new ObjectMapper();
- static {
- objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
- }
+ static {
+ objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+ }
+  public static Stream<Arguments> getParameters() {
+ return Stream.of(
+ Arguments.of(
+ "/valid_block_1.json",
+ "0x1fbf85345a3cbba9a6d44f991b721e55620a22397c2a93ee8d5011136ac300ee",
+ true),
+ Arguments.of(
+ "/valid_block_72.json",
+ "0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510",
+ true),
+ Arguments.of(
+ "/invalid_block_72.json",
+ "0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510",
+ false),
+ Arguments.of(
+ "/valid_block_73.json",
+ "0x18d1dfcc6ccc6f34d14af48a865895bf34bde7f3571d9ba24a4b98122841048c",
+ true),
+ Arguments.of(
+ "/invalid_block_73.json",
+ "0x18d1dfcc6ccc6f34d14af48a865895bf34bde7f3571d9ba24a4b98122841048c",
+ false));
+ }
-    public static Stream<Arguments> getParameters() {
- return Stream.of(
- Arguments.of("/valid_block_1.json","0x1fbf85345a3cbba9a6d44f991b721e55620a22397c2a93ee8d5011136ac300ee", true),
- Arguments.of("/valid_block_72.json", "0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510", true),
- Arguments.of("/invalid_block_72.json", "0x64e1a647f42e5c2e3c434531ccf529e1b3e93363a40db9fc8eec81f492123510", false),
- Arguments.of("/valid_block_73.json", "0x18d1dfcc6ccc6f34d14af48a865895bf34bde7f3571d9ba24a4b98122841048c", true),
- Arguments.of("/invalid_block_73.json", "0x18d1dfcc6ccc6f34d14af48a865895bf34bde7f3571d9ba24a4b98122841048c", false)
- );
- }
+ @ParameterizedTest(name = "{index}: {0}")
+ @MethodSource("getParameters")
+ public void TestVerifyPreStateRoot(
+ final String fileName, final String preStateRoot, final boolean isValid) throws IOException {
+ final InputStream inputStream = VerifyProofTest.class.getResourceAsStream(fileName);
+ final ExecutionWitnessData executionWitnessData =
+ objectMapper.readValue(inputStream, new TypeReference<>() {});
+ final Bytes prestateRoot = Bytes.fromHexString(preStateRoot);
+ assertThat(verifyPreState(executionWitnessData, prestateRoot)).isEqualTo(isValid);
+ }
- @ParameterizedTest(name = "{index}: {0}")
- @MethodSource("getParameters")
- public void TestVerifyPreStateRoot(final String fileName, final String preStateRoot, final boolean isValid) throws IOException {
- final InputStream inputStream = VerifyProofTest.class.getResourceAsStream(fileName);
- final ExecutionWitnessData executionWitnessData = objectMapper.readValue(inputStream, new TypeReference<>() {
+ private boolean verifyPreState(
+ final ExecutionWitnessData executionWitnessData, final Bytes preStateRoot) {
+    final List<byte[]> allStemsKeys = new ArrayList<>();
+    final List<byte[]> allCurrentValues = new ArrayList<>();
+ executionWitnessData.executionWitness.stateDiff.forEach(
+ stateDiff -> {
+ Bytes stem = Bytes.fromHexString(stateDiff.stem);
+ stateDiff.suffixDiffs.forEach(
+ suffixDiff -> {
+ allStemsKeys.add(
+ Bytes.concatenate(stem, Bytes.of(suffixDiff.suffix)).toArrayUnsafe());
+ allCurrentValues.add(
+ ((suffixDiff.currentValue == null)
+ ? Bytes.EMPTY
+ : Bytes.fromHexString(suffixDiff.currentValue))
+ .toArrayUnsafe());
+ });
});
- final Bytes prestateRoot = Bytes.fromHexString(preStateRoot);
- assertThat(verifyPreState(executionWitnessData, prestateRoot)).isEqualTo(isValid);
- }
-
- private boolean verifyPreState(final ExecutionWitnessData executionWitnessData, final Bytes preStateRoot){
-        final List<byte[]> allStemsKeys = new ArrayList<>();
-        final List<byte[]> allCurrentValues = new ArrayList<>();
- executionWitnessData.executionWitness.stateDiff.forEach(stateDiff -> {
- Bytes stem = Bytes.fromHexString(stateDiff.stem);
- stateDiff.suffixDiffs.forEach(suffixDiff -> {
- allStemsKeys.add(Bytes.concatenate(stem,Bytes.of(suffixDiff.suffix)).toArrayUnsafe());
- allCurrentValues.add(((suffixDiff.currentValue==null)?Bytes.EMPTY:Bytes.fromHexString(suffixDiff.currentValue)).toArrayUnsafe());
- });
- });
- final byte[][] commitmentsByPath = toArray(executionWitnessData.executionWitness.verkleProof.commitmentsByPath);
- final byte[][] allCl = toArray(executionWitnessData.executionWitness.verkleProof.ipaProof.cl);
- final byte[][] allCr = toArray(executionWitnessData.executionWitness.verkleProof.ipaProof.cr);
- final byte[][] allOtherStems = toArray(executionWitnessData.executionWitness.verkleProof.otherStems);
- final byte[] d = Bytes.fromHexString(executionWitnessData.executionWitness.verkleProof.d).toArrayUnsafe();
- final byte[] depthExtensionPresent = Bytes.fromHexString(executionWitnessData.executionWitness.verkleProof.depthExtensionPresent).toArrayUnsafe();
- final byte[] finalEvaluation = Bytes.fromHexString(executionWitnessData.executionWitness.verkleProof.ipaProof.finalEvaluation).toArrayUnsafe();
-
- return LibIpaMultipoint.verifyPreStateRoot(allStemsKeys.toArray(byte[][]::new), allCurrentValues.toArray(byte[][]::new), commitmentsByPath, allCl, allCr, allOtherStems, d, depthExtensionPresent, finalEvaluation, preStateRoot.toArrayUnsafe());
- }
-
+ final byte[][] commitmentsByPath =
+ toArray(executionWitnessData.executionWitness.verkleProof.commitmentsByPath);
+ final byte[][] allCl = toArray(executionWitnessData.executionWitness.verkleProof.ipaProof.cl);
+ final byte[][] allCr = toArray(executionWitnessData.executionWitness.verkleProof.ipaProof.cr);
+ final byte[][] allOtherStems =
+ toArray(executionWitnessData.executionWitness.verkleProof.otherStems);
+ final byte[] d =
+ Bytes.fromHexString(executionWitnessData.executionWitness.verkleProof.d).toArrayUnsafe();
+ final byte[] depthExtensionPresent =
+ Bytes.fromHexString(executionWitnessData.executionWitness.verkleProof.depthExtensionPresent)
+ .toArrayUnsafe();
+ final byte[] finalEvaluation =
+ Bytes.fromHexString(
+ executionWitnessData.executionWitness.verkleProof.ipaProof.finalEvaluation)
+ .toArrayUnsafe();
-    private byte[][] toArray(final List<String> elt){
- return elt.stream().map(Bytes::fromHexString).map(Bytes::toArrayUnsafe).toArray(byte[][]::new);
- }
+ return LibIpaMultipoint.verifyPreStateRoot(
+ allStemsKeys.toArray(byte[][]::new),
+ allCurrentValues.toArray(byte[][]::new),
+ commitmentsByPath,
+ allCl,
+ allCr,
+ allOtherStems,
+ d,
+ depthExtensionPresent,
+ finalEvaluation,
+ preStateRoot.toArrayUnsafe());
+ }
+  private byte[][] toArray(final List<String> elt) {
+ return elt.stream().map(Bytes::fromHexString).map(Bytes::toArrayUnsafe).toArray(byte[][]::new);
+ }
}