Skip to content

Commit

Permalink
Change channel to poseidon
Browse files Browse the repository at this point in the history
  • Loading branch information
fmkra committed Feb 22, 2024
1 parent f2e5ecc commit 1cde4ca
Show file tree
Hide file tree
Showing 5 changed files with 67 additions and 70 deletions.
130 changes: 63 additions & 67 deletions src/channel/channel.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -6,49 +6,55 @@ use cairo_verifier::common::{
MONTGOMERY_R_INVERSE
}
};
use poseidon::{hades_permutation, poseidon_hash_span};
use core::integer::BoundedU128;

// Fiat-Shamir channel state for the poseidon-based transcript:
// `digest` absorbs prover messages, `counter` separates successive draws.
#[derive(Drop)]
struct Channel {
    digest: felt252,
    counter: felt252,
}

#[generate_trait]
impl ChannelImpl of ChannelTrait {
// Creates a channel seeded with `digest`, with the draw counter reset to 0.
fn new(digest: felt252) -> Channel {
    Channel { digest: digest, counter: 0 }
}

// Creates a channel with an explicit starting counter (e.g. to resume a transcript).
fn new_with_counter(digest: felt252, counter: felt252) -> Channel {
    Channel { digest: digest, counter: counter }
}

// NOTE(review): pre-poseidon (blake/keccak) implementation, disabled by the
// switch to a felt252 digest; queries.cairo still references it behind a TODO.
// fn random_uint256_to_prover(ref self: Channel) -> u256 {
//     let mut hash_data = ArrayTrait::new(); // u32 for blake, u64 for keccak
//     hash_data.append_big_endian(self.digest);
//     hash_data.append_big_endian(self.counter);
//     self.counter += 1;
//     hash(hash_data).flip_endianness()
// }

// NOTE(review): pre-poseidon rejection-sampling implementation, kept for reference.
// fn random_felt_to_prover(ref self: Channel) -> felt252 {
//     let mut res: felt252 = 0;

//     // To ensure a uniform distribution over field elements, if the generated 256-bit number x is in
//     // range [0, C * PRIME), take x % PRIME. Otherwise, regenerate.
//     // The maximal possible C is 2**256//PRIME = 31.

//     loop {
//         let rand = self.random_uint256_to_prover();
//         if (rand < u256 { low: C_PRIME_AS_UINT256_LOW, high: C_PRIME_AS_UINT256_HIGH }) {
//             let to_append = (rand % STARK_PRIME).try_into().unwrap();
//             res = to_append * MONTGOMERY_R_INVERSE;
//             break;
//         }
//     };
//     res
// }

// Draws a pseudo-random field element: applies the hades permutation to
// (digest, counter, 2) and bumps the counter so successive draws differ.
fn random_felt_to_prover(ref self: Channel) -> felt252 {
    let (hash, _, _) = hades_permutation(self.digest, self.counter, 2);
    self.counter += 1;
    hash
}

fn random_felts_to_prover(ref self: Channel, mut n: felt252) -> Array<felt252> {
Expand All @@ -63,63 +69,53 @@ impl ChannelImpl of ChannelTrait {
};
res
}
// NOTE(review): pre-poseidon variant, disabled by the commit (vector_commit
// now calls read_felt_from_prover instead).
// not sure what to do with it
// fn read_truncated_hash_from_prover(ref self: Channel, value: felt252) {
//     let mut hash_data = ArrayTrait::new(); // u32 for blake, u64 for keccak

//     assert(self.digest.low != BoundedU128::max(), 'digest low is 2^128-1');
//     hash_data.append_big_endian(self.digest + 1);
//     hash_data.append_big_endian(value);

//     self.digest = hash(hash_data).flip_endianness();
//     self.counter = 0;
// }

// Absorbs a single felt sent by the prover: poseidon-hashes (digest + 1, value)
// into the new digest and resets the draw counter.
fn read_felt_from_prover(ref self: Channel, value: felt252) {
    let mut hash_data = ArrayTrait::new();
    hash_data.append(self.digest + 1);
    hash_data.append(value);

    self.digest = poseidon_hash_span(hash_data.span());
    self.counter = 0;
}

// NOTE(review): superseded by read_felt_vector_from_prover (stark_commit now
// calls that instead); kept commented out by the commit.
// fn read_felts_from_prover(ref self: Channel, mut values: Span<felt252>) {
//     loop {
//         match values.pop_front() {
//             Option::Some(value) => { self.read_felt_from_prover(*value); },
//             Option::None => { break; }
//         }
//     }
// }

// Absorbs a whole vector of felts in a single poseidon hash over
// (digest + 1, values...), then resets the draw counter.
fn read_felt_vector_from_prover(ref self: Channel, mut values: Span<felt252>) {
    let mut hash_data = ArrayTrait::new();

    hash_data.append(self.digest + 1);

    loop {
        match values.pop_front() {
            Option::Some(value) => { hash_data.append(*value); },
            Option::None => { break; }
        }
    };

    self.digest = poseidon_hash_span(hash_data.span());
    self.counter = 0;
}

// Absorbs a u64 by widening it to felt252 and delegating to read_felt_from_prover.
fn read_uint64_from_prover(ref self: Channel, value: u64) {
    self.read_felt_from_prover(value.into())
}
}
2 changes: 1 addition & 1 deletion src/proof_of_work/proof_of_work.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ struct ProofOfWorkUnsentCommitment {
// Verifies the proof-of-work nonce against the current channel digest, then
// absorbs the nonce into the transcript.
fn proof_of_work_commit(
    ref channel: Channel, unsent_commitment: ProofOfWorkUnsentCommitment, config: ProofOfWorkConfig
) {
    // digest is felt252 now — convert via .into() to what verify_proof_of_work expects.
    verify_proof_of_work(channel.digest.into(), config.n_bits, unsent_commitment.nonce);
    channel.read_uint64_from_prover(unsent_commitment.nonce);
}

Expand Down
1 change: 1 addition & 0 deletions src/queries/queries.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ fn sample_random_queries(
break;
}

// TODO: (hashing) fix this
let res = channel.random_uint256_to_prover();

let (hh, hl) = DivRem::div_rem(res.high, u64_modulus_nonzero);
Expand Down
2 changes: 1 addition & 1 deletion src/stark/stark_commit.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ fn stark_commit(
let interaction_after_composition = channel.random_felt_to_prover();

// Read OODS values.
channel.read_felts_from_prover(*unsent_commitment.oods_values);
channel.read_felt_vector_from_prover(*unsent_commitment.oods_values);

// Check that the trace and the composition agree at oods_point.
verify_oods(
Expand Down
2 changes: 1 addition & 1 deletion src/vector_commitment/vector_commitment.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ struct VectorCommitmentWitness {
// Absorbs the prover's vector-commitment hash into the channel and wraps it
// together with its config.
fn vector_commit(
    ref channel: Channel, unsent_commitment: felt252, config: VectorCommitmentConfig
) -> VectorCommitment {
    channel.read_felt_from_prover(unsent_commitment); // commitment is being sent
    VectorCommitment { config: config, commitment_hash: unsent_commitment, }
}

Expand Down

0 comments on commit 1cde4ca

Please sign in to comment.