Skip to content

Commit

Permalink
Clean up generate_vector_queries function
Browse files Browse the repository at this point in the history
  • Loading branch information
fmkra committed Jan 12, 2024
1 parent dbe4491 commit 5ec9b51
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 40 deletions.
23 changes: 23 additions & 0 deletions src/common/array_append.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,29 @@ impl ArrayU32AppendFelt of ArrayAppendTrait<u32, felt252> {
}
}

// Extends ArrayAppendTrait to whole spans of felt252: each element of the
// span is appended in order, delegating per-element serialization to the
// single-felt252 impl (ArrayU32AppendFelt) for the chosen byte ordering.
impl ArrayU32AppendFeltsSpan of ArrayAppendTrait<u32, Span<felt252>> {
    // Append every felt252 in `element`, front to back, as little-endian u32 words.
    fn append_little_endian(ref self: Array<u32>, element: Span<felt252>) {
        let mut remaining = element;
        loop {
            match remaining.pop_front() {
                Option::Some(value) => self.append_little_endian(*value),
                Option::None => { break; },
            };
        };
    }
    // Append every felt252 in `element`, front to back, as big-endian u32 words.
    fn append_big_endian(ref self: Array<u32>, element: Span<felt252>) {
        let mut remaining = element;
        loop {
            match remaining.pop_front() {
                Option::Some(value) => self.append_big_endian(*value),
                Option::None => { break; },
            };
        };
    }
}

impl ArrayU32AppendU128 of ArrayAppendTrait<u32, u128> {
fn append_little_endian(ref self: Array<u32>, mut element: u128) {
let mut i = 4;
Expand Down
60 changes: 20 additions & 40 deletions src/table_commitment/table_commitment.cairo
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,6 @@ fn table_decommit(
let montgomery_values = to_montgomery(decommitment.values);

// Generate queries to the underlying vector commitment.
// TODO: change n_columns type to u32 in config
let vector_queries = generate_vector_queries(
queries,
montgomery_values.span(),
Expand Down Expand Up @@ -124,47 +123,28 @@ fn generate_vector_queries(
if i == queries_len {
break;
}
if n_columns == 1 {
vector_queries
.append(VectorQuery { index: *queries[curr_queries], value: *values[curr_values] });
curr_queries += 1;
curr_values += 1;
} else if is_verifier_friendly == false {
let mut data: Array<u32> = ArrayTrait::new();

// TODO: extract to separate function and use span's slice here
let mut j = 0;
loop {
if j == n_columns {
break;
}
data.append_big_endian(*values[curr_values]);
curr_values += 1;
j += 1;
};

let hash = blake2s(data).flip_endianness();

// Truncate hash - convert value to felt, by taking the 160 least significant bits
// TODO: check if we can use truncated_blake2s here
let two_pow32: u128 = 0x100000000;
let (high_h, high_l) = DivRem::div_rem(hash.high, two_pow32.try_into().unwrap());
vector_queries
.append(
VectorQuery {
index: *queries[curr_queries],
value: high_l.into() * 0x100000000000000000000000000000000 + hash.low.into()
}
);
curr_queries += 1;
let hash = if n_columns == 1 {
*values[curr_values]
} else {
let hash = poseidon_hash_span(values.slice(curr_values, n_columns));

vector_queries.append(VectorQuery { index: *queries[curr_queries], value: hash });

curr_values += n_columns;
curr_queries += 1;
let slice = values.slice(curr_values, n_columns);
if is_verifier_friendly {
poseidon_hash_span(slice)
} else {
let mut data: Array<u32> = ArrayTrait::new();
data.append_big_endian(slice);

// Truncate hash - convert value to felt, by taking the 160 least significant bits
let hash: felt252 = (blake2s(data)
.flip_endianness() % 0x10000000000000000000000000000000000000000)
.try_into()
.unwrap();

hash
}
};
vector_queries.append(VectorQuery { index: *queries[curr_queries], value: hash });
curr_values += n_columns;
curr_queries += 1;
i += 1;
};
vector_queries
Expand Down

0 comments on commit 5ec9b51

Please sign in to comment.