Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refinement in IPA templating #359

Merged
merged 2 commits into from
Mar 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/provider/hyperkzg.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
//! This means that Spartan's polynomial IOP can commit to its polynomials as-is without incurring any interpolations or FFTs.
//! (2) HyperKZG is specialized to use KZG as the univariate commitment scheme, so it includes several optimizations (both during the transformation of multilinear-to-univariate claims
//! and within the KZG commitment scheme implementation itself).
//! (3) HyperKZG also includes optimisation based on so called Shplonk/HaloInfinite technique (https://hackmd.io/@adrian-aztec/BJxoyeCqj#Phase-2-Gemini).
//! (3) HyperKZG also includes optimisation based on so called Shplonk/HaloInfinite technique (`<https://hackmd.io/@adrian-aztec/BJxoyeCqj#Phase-2-Gemini>`).
//! Compared to pure HyperKZG, this optimisation in theory improves prover (at cost of using 1 fixed KZG opening) and verifier (at cost of eliminating MSM)
//!
#![allow(non_snake_case)]
Expand Down
15 changes: 8 additions & 7 deletions src/provider/tests/ipa_pc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
mod test {
use crate::provider::ipa_pc::EvaluationEngine;
use crate::provider::tests::solidity_compatibility_utils::{
ec_points_to_json, field_elements_to_json, generate_pcs_solidity_unit_test_data,
compressed_commitment_to_json, ec_points_to_json, field_elements_to_json,
generate_pcs_solidity_unit_test_data,
};

use crate::provider::GrumpkinEngine;
Expand Down Expand Up @@ -33,11 +34,11 @@ Grumpkin.GrumpkinAffinePoint[] memory ck_s = new Grumpkin.GrumpkinAffinePoint[](
uint256[] memory point = new uint256[]({{ len point }});
{{ #each point }} point[{{ i }}]={{ val }};\n {{ /each }}

Grumpkin.GrumpkinAffinePoint[] memory L_vec = new Grumpkin.GrumpkinAffinePoint[]({{ len L_vec }});
{{ #each L_vec }} L_vec[{{ i }}]=Grumpkin.GrumpkinAffinePoint({{ x }}, {{y}});\n {{ /each }}
uint256[] memory L_vec = new uint256[]({{ len L_vec }});
{{ #each L_vec }} L_vec[{{ i }}]={{ compressed }};\n {{ /each }}

Grumpkin.GrumpkinAffinePoint[] memory R_vec = new Grumpkin.GrumpkinAffinePoint[]({{ len R_vec }});
{{ #each R_vec }} R_vec[{{ i }}]=Grumpkin.GrumpkinAffinePoint({{ x }}, {{y}});\n {{ /each }}
uint256[] memory R_vec = new uint256[]({{ len R_vec }});
{{ #each R_vec }} R_vec[{{ i }}]={{ compressed }};\n {{ /each }}

uint256 a_hat = {{ a_hat }};

Expand Down Expand Up @@ -94,8 +95,8 @@ return keccak_transcript;
let l_vec = CommitmentKey::<GrumpkinEngine>::reinterpret_commitments_as_ck(&proof.L_vec)
.expect("can't reinterpred L_vec");

let r_vec_array = ec_points_to_json::<GrumpkinEngine>(&r_vec.ck);
let l_vec_array = ec_points_to_json::<GrumpkinEngine>(&l_vec.ck);
let r_vec_array = compressed_commitment_to_json::<GrumpkinEngine>(&r_vec.ck);
let l_vec_array = compressed_commitment_to_json::<GrumpkinEngine>(&l_vec.ck);
let point_array = field_elements_to_json::<GrumpkinEngine>(&point);
let ckv_array = ec_points_to_json::<GrumpkinEngine>(&vk.ck_v.ck);
let cks_array = ec_points_to_json::<GrumpkinEngine>(&vk.ck_s.ck);
Expand Down
25 changes: 25 additions & 0 deletions src/provider/tests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ pub mod solidity_compatibility_utils {
};
use group::prime::PrimeCurve;
use group::prime::PrimeCurveAffine;
use group::GroupEncoding;
use rand::rngs::StdRng;
use serde_json::{Map, Value};
use std::sync::Arc;
Expand Down Expand Up @@ -121,4 +122,28 @@ pub mod solidity_compatibility_utils {
});
value_vector
}

/// Serializes a slice of affine EC points as JSON objects carrying their
/// compressed encodings, for use in generated Solidity unit tests.
///
/// Each output object has the shape `{ "i": "<index>", "compressed": "0x<hex>" }`,
/// matching the `{{ i }}` / `{{ compressed }}` placeholders in the IPA
/// Handlebars templates.
pub(crate) fn compressed_commitment_to_json<E>(
  ec_points: &[<E::GE as PrimeCurve>::Affine],
) -> Vec<Value>
where
  E: Engine,
  E::GE: DlogGroup<ScalarExt = E::Scalar>,
{
  ec_points
    .iter()
    .enumerate()
    .map(|(i, ec_point)| {
      // Compress the point to its canonical byte encoding.
      let mut bytes = ec_point.to_curve().to_bytes().as_ref().to_vec();
      // Reverse the byte order so the hex string reads as a Solidity
      // `uint256` literal (assumes `to_bytes` is little-endian for this
      // curve — TODO confirm against the Grumpkin `GroupEncoding` impl).
      bytes.reverse();

      let mut obj = Map::new();
      obj.insert("i".to_string(), Value::String(i.to_string()));
      obj.insert(
        "compressed".to_string(),
        Value::String(format!("0x{}", hex::encode(bytes))),
      );
      Value::Object(obj)
    })
    .collect()
}
}