Skip to content

Commit

Permalink
Merge keccak and blake verifier versions
Browse files Browse the repository at this point in the history
  • Loading branch information
fmkra committed Feb 9, 2024
1 parent 2df70db commit ca56a2f
Show file tree
Hide file tree
Showing 7 changed files with 89 additions and 14 deletions.
2 changes: 1 addition & 1 deletion src/air/public_input.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ impl PublicInputImpl of PublicInputTrait {
.update_with(AddrValueSize * self.main_page.len());
let main_page_hash = main_page_hash_state.finalize();

let mut hash_data = ArrayTrait::<u32>::new(); // TODO: u64 for keccak
let mut hash_data = ArrayTrait::new(); // u32 for blake, u64 for keccak
ArrayAppendTrait::<_, u256>::append_big_endian(ref hash_data, (*self.log_n_steps).into());
ArrayAppendTrait::<_, u256>::append_big_endian(ref hash_data, (*self.rc_min).into());
ArrayAppendTrait::<_, u256>::append_big_endian(ref hash_data, (*self.rc_max).into());
Expand Down
2 changes: 1 addition & 1 deletion src/channel/channel.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ impl ChannelImpl of ChannelTrait {
}

fn read_truncated_hash_from_prover(ref self: Channel, value: felt252) {
let mut hash_data = ArrayTrait::new(); // u32 for blake, u64 for keccak
let mut hash_data = ArrayTrait::<u64>::new(); // u32 for blake, u64 for keccak

assert(self.digest.low != BoundedU128::max(), 'digest low is 2^128-1');
hash_data.append_big_endian(self.digest + 1);
Expand Down
66 changes: 66 additions & 0 deletions src/common/array_append.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -179,3 +179,69 @@ impl ArrayU8AppendU16 of ArrayAppendTrait<u8, u16> {
self.append(low);
}
}

// Appends a 256-bit value to a u64 word array by splitting it into its two
// 128-bit halves and delegating to the u128 impl of the same trait.
impl ArrayU64AppendU256 of ArrayAppendTrait<u64, u256> {
    // Little-endian: least-significant half (`low`) is appended first.
    fn append_little_endian(ref self: Array<u64>, element: u256) {
        self.append_little_endian(element.low);
        self.append_little_endian(element.high);
    }

    // Big-endian: most-significant half (`high`) is appended first.
    fn append_big_endian(ref self: Array<u64>, element: u256) {
        self.append_big_endian(element.high);
        self.append_big_endian(element.low);
    }
}

// Appends a 128-bit value to a u64 word array as two 64-bit words.
// `u128_split` yields the (high, low) 64-bit halves of the input.
impl ArrayU64AppendU128 of ArrayAppendTrait<u64, u128> {
    // Little-endian: low word first; word contents keep their native order.
    // NOTE(review): dropped the spurious `mut` on `element` — it is consumed
    // by value by `u128_split` and never reassigned.
    fn append_little_endian(ref self: Array<u64>, element: u128) {
        let (high, low) = u128_split(element);
        self.append(low);
        self.append(high);
    }

    // Big-endian: high word first, with each word's bytes reversed via
    // `flip_endianness` so the resulting byte stream reads big-endian.
    fn append_big_endian(ref self: Array<u64>, element: u128) {
        let (high, low) = u128_split(element);
        self.append(high.flip_endianness());
        self.append(low.flip_endianness());
    }
}

// Appends a single 64-bit value to a u64 word array — the base case the
// wider (u128/u256/felt252) impls of this trait bottom out in.
impl ArrayU64AppendU64 of ArrayAppendTrait<u64, u64> {
    // Little-endian: the word is stored as-is.
    // NOTE(review): dropped the spurious `mut` on `element` — it is never
    // reassigned in either method.
    fn append_little_endian(ref self: Array<u64>, element: u64) {
        self.append(element);
    }

    // Big-endian: the word's byte order is reversed before storing.
    fn append_big_endian(ref self: Array<u64>, element: u64) {
        self.append(element.flip_endianness());
    }
}

// Appends a felt252 to a u64 word array by first widening it to u256 and
// then reusing the u256 impl of the same trait.
impl ArrayU64AppendFelt of ArrayAppendTrait<u64, felt252> {
    fn append_little_endian(ref self: Array<u64>, element: felt252) {
        let widened: u256 = element.into();
        self.append_little_endian(widened);
    }

    fn append_big_endian(ref self: Array<u64>, element: felt252) {
        let widened: u256 = element.into();
        self.append_big_endian(widened);
    }
}

// Appends every felt252 of a span, in order, to a u64 word array by
// delegating each element to the felt252 impl of this trait.
impl ArrayU64AppendFeltSpan of ArrayAppendTrait<u64, Span<felt252>> {
    // BUG FIX: this variant previously delegated each element to
    // `append_big_endian`, so callers asking for little-endian output
    // silently received big-endian data. It now delegates to
    // `append_little_endian`. (`mut` on `element` is required: the span
    // is consumed via `pop_front`.)
    fn append_little_endian(ref self: Array<u64>, mut element: Span<felt252>) {
        loop {
            match element.pop_front() {
                Option::Some(element) => { self.append_little_endian(*element); },
                Option::None => { break; }
            }
        };
    }

    fn append_big_endian(ref self: Array<u64>, mut element: Span<felt252>) {
        loop {
            match element.pop_front() {
                Option::Some(element) => { self.append_big_endian(*element); },
                Option::None => { break; }
            }
        };
    }
}
22 changes: 16 additions & 6 deletions src/common/hasher.cairo
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
use cairo_verifier::common::{blake2s::blake2s, blake2s::truncated_blake2s, blake2s_u8::blake2s as blake2s_u8};
use cairo_verifier::common::{blake2s::blake2s, blake2s::truncated_blake2s, blake2s_u8::blake2s as blake2s_u8, flip_endianness::FlipEndiannessTrait};

// blake hasher

// fn hash_u8(data: Array<u8>) -> u256 {
// fn hash_n_bytes(mut data: Array<u8>, n: u8, hash_len: bool) -> u256 {
// if hash_len {
// data.append(n);
// }
// blake2s_u8(data)
// }

Expand All @@ -16,12 +19,19 @@ use cairo_verifier::common::{blake2s::blake2s, blake2s::truncated_blake2s, blake

// keccak hasher

fn hash_u8(data: Array<u64>) -> u256 {

// Keccak hash of `data` (little-endian u64 words).
// When `hash_len` is true, `n` is fed to `cairo_keccak` as a 1-byte final
// partial word — i.e. the byte `n` is appended to the input before hashing
// (presumably so proof-of-work can hash `data || n_bits`; the last two
// arguments of `cairo_keccak` are the last partial input word and its byte
// count — TODO confirm against the corelib signature). When false, the
// input ends exactly on a word boundary and nothing extra is hashed.
fn hash_n_bytes(mut data: Array<u64>, n: u8, hash_len: bool) -> u256 {
    if hash_len {
        keccak::cairo_keccak(ref data, n.into(), 1)
    } else {
        keccak::cairo_keccak(ref data, 0, 0)
    }
}

fn hash_truncated(data: Array<u64>) -> felt252 {

// Keccak hash of `data` (no trailing partial word), byte-order-flipped and
// truncated to its low 160 bits (mod 2^160 = 0x1 followed by 40 hex zeros)
// so the result is guaranteed to fit in a felt252; the `try_into().unwrap()`
// cannot fail after that reduction.
fn hash_truncated(mut data: Array<u64>) -> felt252 {
    (keccak::cairo_keccak(ref data, 0, 0)
        .flip_endianness() % 0x10000000000000000000000000000000000000000)
        .try_into()
        .unwrap()
}

fn hash(mut data: Array<u64>) -> u256 {
Expand Down
7 changes: 3 additions & 4 deletions src/proof_of_work/proof_of_work.cairo
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use cairo_verifier::{
common::{
flip_endianness::FlipEndiannessTrait, array_print::{SpanPrintTrait, ArrayPrintTrait},
hasher::hash_u8, array_append::ArrayAppendTrait, math::pow,
hasher::hash_n_bytes, array_append::ArrayAppendTrait, math::pow,
},
channel::channel::{Channel, ChannelTrait}, proof_of_work::config::{ProofOfWorkConfig}
};
Expand Down Expand Up @@ -29,8 +29,7 @@ fn verify_proof_of_work(digest: u256, n_bits: u8, nonce: u64) {
let mut init_hash_data = ArrayTrait::new(); // u8 with blake, u64 with keccak
init_hash_data.append_big_endian(MAGIC);
init_hash_data.append_big_endian(digest);
init_hash_data.append(n_bits);
let init_hash = hash_u8(init_hash_data).flip_endianness();
let init_hash = hash_n_bytes(init_hash_data, n_bits.into(), true).flip_endianness();

// Compute Hash(init_hash || nonce )
// 32 bytes || 8 bytes
Expand All @@ -39,7 +38,7 @@ fn verify_proof_of_work(digest: u256, n_bits: u8, nonce: u64) {
let mut hash_data = ArrayTrait::new(); // u8 with blake, u64 with keccak
hash_data.append_big_endian(init_hash);
hash_data.append_big_endian(nonce);
let hash = hash_u8(hash_data).flip_endianness();
let hash = hash_n_bytes(hash_data, 0, false).flip_endianness();

let work_limit = pow(2, 128 - n_bits.into());
assert(
Expand Down
2 changes: 1 addition & 1 deletion src/table_commitment/table_commitment.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ fn generate_vector_queries(
*values[i * n_columns]
} else {
let slice = values.slice(i * n_columns, n_columns);
let mut data: Array<u32> = ArrayTrait::new();
let mut data = ArrayTrait::new(); // u32 for blake, u64 for keccak
data.append_big_endian(slice);
hash_truncated(data)
};
Expand Down
2 changes: 1 addition & 1 deletion src/vector_commitment/vector_commitment.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ fn hash_blake_or_pedersen(x: felt252, y: felt252, is_verifier_friendly: bool) ->
if is_verifier_friendly {
PedersenTrait::new(x).update(y).finalize()
} else {
let mut data = ArrayTrait::<u32>::new();
let mut data = ArrayTrait::new(); // u32 for blake, u64 for keccak
data.append_big_endian(x);
data.append_big_endian(y);
hash_truncated(data)
Expand Down

0 comments on commit ca56a2f

Please sign in to comment.