diff --git a/halo2_proofs/Cargo.toml b/halo2_proofs/Cargo.toml
index b611531f80..38b001bce8 100644
--- a/halo2_proofs/Cargo.toml
+++ b/halo2_proofs/Cargo.toml
@@ -52,6 +52,7 @@ halo2curves = { git = 'https://github.com/privacy-scaling-explorations/halo2curv
 rand_core = { version = "0.6", default-features = false }
 tracing = "0.1"
 blake2b_simd = "1"
+sha3 = "0.9.1"
 
 # Developer tooling dependencies
 plotters = { version = "0.3.0", optional = true }
diff --git a/halo2_proofs/src/poly/multiopen_test.rs b/halo2_proofs/src/poly/multiopen_test.rs
index 1df8edaa03..8dd563b15a 100644
--- a/halo2_proofs/src/poly/multiopen_test.rs
+++ b/halo2_proofs/src/poly/multiopen_test.rs
@@ -13,8 +13,9 @@ mod test {
     };
     use crate::poly::{Coeff, Polynomial};
     use crate::transcript::{
-        self, Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, TranscriptRead,
-        TranscriptReadBuffer, TranscriptWrite, TranscriptWriterBuffer,
+        self, Blake2bRead, Blake2bWrite, Challenge255, EncodedChallenge, Keccak256Read,
+        Keccak256Write, TranscriptRead, TranscriptReadBuffer, TranscriptWrite,
+        TranscriptWriterBuffer,
     };
     use ff::Field;
     use group::{Curve, Group};
@@ -59,6 +60,43 @@ mod test {
         >(verifier_params, &proof[..], true);
     }
 
+    #[test]
+    fn test_roundtrip_ipa_keccak() {
+        use crate::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA};
+        use crate::poly::ipa::multiopen::{ProverIPA, VerifierIPA};
+        use crate::poly::ipa::strategy::AccumulatorStrategy;
+        use halo2curves::pasta::{Ep, EqAffine, Fp};
+
+        const K: u32 = 4;
+
+        let params = ParamsIPA::<EqAffine>::new(K);
+
+        let proof = create_proof::<
+            IPACommitmentScheme<EqAffine>,
+            ProverIPA<_>,
+            _,
+            Keccak256Write<_, _, Challenge255<_>>,
+        >(&params);
+
+        let verifier_params = params.verifier_params();
+
+        verify::<
+            IPACommitmentScheme<EqAffine>,
+            VerifierIPA<_>,
+            _,
+            Keccak256Read<_, _, Challenge255<_>>,
+            AccumulatorStrategy<_>,
+        >(verifier_params, &proof[..], false);
+
+        verify::<
+            IPACommitmentScheme<EqAffine>,
+            VerifierIPA<_>,
+            _,
+            Keccak256Read<_, _, Challenge255<_>>,
+            AccumulatorStrategy<_>,
+        >(verifier_params, &proof[..], true);
+    }
+
     #[test]
     fn test_roundtrip_gwc() {
         use crate::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG};
diff --git a/halo2_proofs/src/transcript.rs b/halo2_proofs/src/transcript.rs
index 5262f3c1c7..45c08df95b 100644
--- a/halo2_proofs/src/transcript.rs
+++ b/halo2_proofs/src/transcript.rs
@@ -3,6 +3,7 @@
 
 use blake2b_simd::{Params as Blake2bParams, State as Blake2bState};
 use group::ff::PrimeField;
+use sha3::{Digest, Keccak256};
 use std::convert::TryInto;
 
 use halo2curves::{Coordinates, CurveAffine, FieldExt};
@@ -19,6 +20,23 @@ const BLAKE2B_PREFIX_POINT: u8 = 1;
 /// Prefix to a prover's message containing a scalar
 const BLAKE2B_PREFIX_SCALAR: u8 = 2;
 
+/// Prefix to a prover's message soliciting a challenge
+const KECCAK256_PREFIX_CHALLENGE: u8 = 0;
+
+/// First prefix to a prover's message soliciting a challenge
+/// Not included in the growing state!
+const KECCAK256_PREFIX_CHALLENGE_LO: u8 = 10;
+
+/// Second prefix to a prover's message soliciting a challenge
+/// Not included in the growing state!
+const KECCAK256_PREFIX_CHALLENGE_HI: u8 = 11;
+
+/// Prefix to a prover's message containing a curve point
+const KECCAK256_PREFIX_POINT: u8 = 1;
+
+/// Prefix to a prover's message containing a scalar
+const KECCAK256_PREFIX_SCALAR: u8 = 2;
+
 /// Generic transcript view (from either the prover or verifier's perspective)
 pub trait Transcript<C: CurveAffine, E: EncodedChallenge<C>> {
     /// Squeeze an encoded verifier challenge from the transcript.
@@ -88,6 +106,14 @@ pub struct Blake2bRead<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
     _marker: PhantomData<(C, E)>,
 }
 
+/// Keccak256 hash function reader for EVM compatibility
+#[derive(Debug, Clone)]
+pub struct Keccak256Read<R: Read, C: CurveAffine, E: EncodedChallenge<C>> {
+    state: Keccak256,
+    reader: R,
+    _marker: PhantomData<(C, E)>,
+}
+
 impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
     for Blake2bRead<R, C, Challenge255<C>>
 {
@@ -104,6 +130,21 @@ impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
     }
 }
 
+impl<R: Read, C: CurveAffine> TranscriptReadBuffer<R, C, Challenge255<C>>
+    for Keccak256Read<R, C, Challenge255<C>>
+{
+    /// Initialize a transcript given an input buffer.
+    fn init(reader: R) -> Self {
+        let mut state = Keccak256::new();
+        state.update(b"Halo2-Transcript");
+        Keccak256Read {
+            state,
+            reader,
+            _marker: PhantomData,
+        }
+    }
+}
+
 impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
     for Blake2bRead<R, C, Challenge255<C>>
 {
@@ -133,6 +174,35 @@ impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
     }
 }
 
+impl<R: Read, C: CurveAffine> TranscriptRead<C, Challenge255<C>>
+    for Keccak256Read<R, C, Challenge255<C>>
+{
+    fn read_point(&mut self) -> io::Result<C> {
+        let mut compressed = C::Repr::default();
+        self.reader.read_exact(compressed.as_mut())?;
+        let point: C = Option::from(C::from_bytes(&compressed)).ok_or_else(|| {
+            io::Error::new(io::ErrorKind::Other, "invalid point encoding in proof")
+        })?;
+        self.common_point(point)?;
+
+        Ok(point)
+    }
+
+    fn read_scalar(&mut self) -> io::Result<C::Scalar> {
+        let mut data = <C::Scalar as PrimeField>::Repr::default();
+        self.reader.read_exact(data.as_mut())?;
+        let scalar: C::Scalar = Option::from(C::Scalar::from_repr(data)).ok_or_else(|| {
+            io::Error::new(
+                io::ErrorKind::Other,
+                "invalid field element encoding in proof",
+            )
+        })?;
+        self.common_scalar(scalar)?;
+
+        Ok(scalar)
+    }
+}
+
 impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
     for Blake2bRead<R, C, Challenge255<C>>
 {
@@ -165,6 +235,48 @@ impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
     }
 }
 
+impl<R: Read, C: CurveAffine> Transcript<C, Challenge255<C>>
+    for Keccak256Read<R, C, Challenge255<C>>
+{
+    fn squeeze_challenge(&mut self) -> Challenge255<C> {
+        self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);
+
+        let mut state_lo = self.state.clone();
+        let mut state_hi = self.state.clone();
+        state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
+        state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
+        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
+        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();
+
+        let mut t = result_lo.to_vec();
+        t.extend_from_slice(&result_hi[..]);
+        let result: [u8; 64] = t.as_slice().try_into().unwrap();
+
+        Challenge255::<C>::new(&result)
+    }
+
+    fn common_point(&mut self, point: C) -> io::Result<()> {
+        self.state.update(&[KECCAK256_PREFIX_POINT]);
+        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
+            io::Error::new(
+                io::ErrorKind::Other,
+                "cannot write points at infinity to the transcript",
+            )
+        })?;
+        self.state.update(coords.x().to_repr().as_ref());
+        self.state.update(coords.y().to_repr().as_ref());
+
+        Ok(())
+    }
+
+    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
+        self.state.update(&[KECCAK256_PREFIX_SCALAR]);
+        self.state.update(scalar.to_repr().as_ref());
+
+        Ok(())
+    }
+}
+
 /// We will replace BLAKE2b with an algebraic hash function in a later version.
 #[derive(Debug, Clone)]
 pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
@@ -173,9 +285,18 @@ pub struct Blake2bWrite<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
     _marker: PhantomData<(C, E)>,
 }
 
+/// Keccak256 hash function writer for EVM compatibility
+#[derive(Debug, Clone)]
+pub struct Keccak256Write<W: Write, C: CurveAffine, E: EncodedChallenge<C>> {
+    state: Keccak256,
+    writer: W,
+    _marker: PhantomData<(C, E)>,
+}
+
 impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
     for Blake2bWrite<W, C, Challenge255<C>>
 {
+    /// Initialize a transcript given an output buffer.
     fn init(writer: W) -> Self {
         Blake2bWrite {
             state: Blake2bParams::new()
@@ -193,6 +314,27 @@ impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
     }
 }
 
+impl<W: Write, C: CurveAffine> TranscriptWriterBuffer<W, C, Challenge255<C>>
+    for Keccak256Write<W, C, Challenge255<C>>
+{
+    /// Initialize a transcript given an output buffer.
+    fn init(writer: W) -> Self {
+        let mut state = Keccak256::new();
+        state.update(b"Halo2-Transcript");
+        Keccak256Write {
+            state,
+            writer,
+            _marker: PhantomData,
+        }
+    }
+
+    /// Conclude the interaction and return the output buffer (writer).
+    fn finalize(self) -> W {
+        // TODO: handle outstanding scalars? see issue #138
+        self.writer
+    }
+}
+
 impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
     for Blake2bWrite<W, C, Challenge255<C>>
 {
@@ -208,6 +350,21 @@ impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
     }
 }
 
+impl<W: Write, C: CurveAffine> TranscriptWrite<C, Challenge255<C>>
+    for Keccak256Write<W, C, Challenge255<C>>
+{
+    fn write_point(&mut self, point: C) -> io::Result<()> {
+        self.common_point(point)?;
+        let compressed = point.to_bytes();
+        self.writer.write_all(compressed.as_ref())
+    }
+    fn write_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
+        self.common_scalar(scalar)?;
+        let data = scalar.to_repr();
+        self.writer.write_all(data.as_ref())
+    }
+}
+
 impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
     for Blake2bWrite<W, C, Challenge255<C>>
 {
@@ -240,6 +397,48 @@ impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
     }
 }
 
+impl<W: Write, C: CurveAffine> Transcript<C, Challenge255<C>>
+    for Keccak256Write<W, C, Challenge255<C>>
+{
+    fn squeeze_challenge(&mut self) -> Challenge255<C> {
+        self.state.update(&[KECCAK256_PREFIX_CHALLENGE]);
+
+        let mut state_lo = self.state.clone();
+        let mut state_hi = self.state.clone();
+        state_lo.update(&[KECCAK256_PREFIX_CHALLENGE_LO]);
+        state_hi.update(&[KECCAK256_PREFIX_CHALLENGE_HI]);
+        let result_lo: [u8; 32] = state_lo.finalize().as_slice().try_into().unwrap();
+        let result_hi: [u8; 32] = state_hi.finalize().as_slice().try_into().unwrap();
+
+        let mut t = result_lo.to_vec();
+        t.extend_from_slice(&result_hi[..]);
+        let result: [u8; 64] = t.as_slice().try_into().unwrap();
+
+        Challenge255::<C>::new(&result)
+    }
+
+    fn common_point(&mut self, point: C) -> io::Result<()> {
+        self.state.update(&[KECCAK256_PREFIX_POINT]);
+        let coords: Coordinates<C> = Option::from(point.coordinates()).ok_or_else(|| {
+            io::Error::new(
+                io::ErrorKind::Other,
+                "cannot write points at infinity to the transcript",
+            )
+        })?;
+        self.state.update(coords.x().to_repr().as_ref());
+        self.state.update(coords.y().to_repr().as_ref());
+
+        Ok(())
+    }
+
+    fn common_scalar(&mut self, scalar: C::Scalar) -> io::Result<()> {
+        self.state.update(&[KECCAK256_PREFIX_SCALAR]);
+        self.state.update(scalar.to_repr().as_ref());
+
+        Ok(())
+    }
+}
+
 /// The scalar representation of a verifier challenge.
 ///
 /// The `Type` type can be used to scope the challenge to a specific context, or
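For reviewers, here is a minimal usage sketch of the new transcript types outside the test harness. It is illustrative only and not part of the diff: the `main` wrapper and the `Fp::from(42u64)` value are arbitrary placeholders, and it assumes the `halo2curves::pasta` curves already used by `test_roundtrip_ipa_keccak` above. It drives just the low-level transcript API and checks that a prover-side `Keccak256Write` and a verifier-side `Keccak256Read` that absorb the same data squeeze the same challenge.

```rust
use halo2_proofs::transcript::{
    Challenge255, Keccak256Read, Keccak256Write, Transcript, TranscriptRead,
    TranscriptReadBuffer, TranscriptWrite, TranscriptWriterBuffer,
};
use halo2curves::pasta::{EqAffine, Fp};

fn main() {
    // Prover side: absorb a scalar into the Keccak256 state (and write it to
    // the proof buffer), squeeze a challenge, then hand back the proof bytes.
    let mut transcript =
        Keccak256Write::<Vec<u8>, EqAffine, Challenge255<EqAffine>>::init(vec![]);
    transcript.write_scalar(Fp::from(42u64)).unwrap();
    let prover_challenge = transcript.squeeze_challenge();
    let proof = transcript.finalize();

    // Verifier side: replay the same absorption from the proof bytes and
    // squeeze again; both sides must derive the identical challenge.
    let mut transcript =
        Keccak256Read::<_, EqAffine, Challenge255<EqAffine>>::init(&proof[..]);
    let _scalar = transcript.read_scalar().unwrap();
    let verifier_challenge = transcript.squeeze_challenge();

    assert_eq!(*prover_challenge, *verifier_challenge);
}
```

On the design in the diff itself: `squeeze_challenge` finalizes two copies of the running Keccak256 state, one tagged with the LO prefix and one with the HI prefix, and concatenates the two 32-byte digests because `Challenge255::new` expects a 64-byte input that it reduces into the scalar field; the clones keep those one-off prefixes out of the growing transcript state, as the comments on the prefix constants note.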