Added db macro #388

Closed
wants to merge 13 commits
3 changes: 3 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

33 changes: 33 additions & 0 deletions common/db/src/db_macro.rs
@@ -0,0 +1,33 @@
#[macro_export]
macro_rules! create_db {
  ($db_name: ident
    { $($field_name: ident: $field_type: ty),*}
  ) => {
    fn db_key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
      let db_len = u8::try_from(db_dst.len()).unwrap();
      let dst_len = u8::try_from(item_dst.len()).unwrap();
      [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
    }

    $(
      #[derive(Clone, Debug)]
      pub struct $field_name;
      impl $field_name {
        pub fn key(key: impl AsRef<[u8]>) -> Vec<u8> {
          db_key(stringify!($db_name).as_bytes(), stringify!($field_name).as_bytes(), key)
        }
        #[allow(dead_code)]
        pub fn set(txn: &mut impl DbTxn, key: impl AsRef<[u8]>, data: &impl serde::Serialize) {
          let key = $field_name::key(key);
          txn.put(&key, bincode::serialize(data).unwrap());
        }
        #[allow(dead_code)]
        pub fn get(getter: &impl Get, key: impl AsRef<[u8]>) -> Option<$field_type> {
          getter.get($field_name::key(key)).map(|data| {
            bincode::deserialize(data.as_ref()).unwrap()
          })
        }
      }
    )*
  };
}
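
A minimal usage sketch of the generated API (the ExampleDb name, SomeValue field, and example_usage function are hypothetical, not part of this PR; it assumes serai_db's MemDb plus the Db/DbTxn/Get traits used elsewhere in this diff, and bincode/serde as dependencies of the calling crate, mirroring the coordinator/Cargo.toml change below). Each listed field expands into a unit struct whose set/get helpers bincode-(de)serialize the value under a key built from the length-prefixed DB name, the length-prefixed item name, and the caller's key bytes:

use serai_db::{create_db, Db, DbTxn, Get, MemDb};

create_db!(
  ExampleDb {
    SomeValue: u64
  }
);

fn example_usage() {
  let mut db = MemDb::new();
  let mut txn = db.txn();
  // Stored as [len("ExampleDb")] ++ "ExampleDb" ++ [len("SomeValue")] ++ "SomeValue" ++ b"some key"
  SomeValue::set(&mut txn, b"some key", &5u64);
  txn.commit();
  assert_eq!(SomeValue::get(&db, b"some key"), Some(5u64));
}
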
2 changes: 2 additions & 0 deletions common/db/src/lib.rs
@@ -1,5 +1,7 @@
mod mem;
pub use mem::*;
mod db_macro;
pub use db_macro::*;

#[cfg(feature = "rocksdb")]
mod rocks;
2 changes: 2 additions & 0 deletions coordinator/Cargo.toml
@@ -42,6 +42,8 @@ tributary = { package = "tributary-chain", path = "./tributary" }
serai-client = { path = "../substrate/client", features = ["serai"] }

hex = "0.4"
bincode = { version = "1", default-features = false }
serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = { version = "1", default-features = false }

log = "0.4"
50 changes: 11 additions & 39 deletions processor/src/db.rs
@@ -1,51 +1,23 @@
use core::marker::PhantomData;
use std::io::Read;

use scale::{Encode, Decode};
use scale::Decode;
use serai_client::validator_sets::primitives::{ValidatorSet, KeyPair};

pub use serai_db::*;

use crate::networks::{Block, Network};

#[derive(Debug)]
pub struct MainDb<N: Network, D: Db>(D, PhantomData<N>);
impl<N: Network, D: Db> MainDb<N, D> {
pub fn new(db: D) -> Self {
Self(db, PhantomData)
}

fn main_key(dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
D::key(b"MAIN", dst, key)
create_db!(
MainDb {
HandledMessageDb: Vec<u8>,
PendingActivationsDb: Vec<u8>
}
);

fn handled_key(id: u64) -> Vec<u8> {
Self::main_key(b"handled", id.to_le_bytes())
}
pub fn handled_message(&self, id: u64) -> bool {
self.0.get(Self::handled_key(id)).is_some()
}
pub fn handle_message(txn: &mut D::Transaction<'_>, id: u64) {
txn.put(Self::handled_key(id), [])
}

fn pending_activation_key() -> Vec<u8> {
Self::main_key(b"pending_activation", [])
}
pub fn set_pending_activation(
txn: &mut D::Transaction<'_>,
block_before_queue_block: <N::Block as Block<N>>::Id,
set: ValidatorSet,
key_pair: KeyPair,
) {
let mut buf = (set, key_pair).encode();
buf.extend(block_before_queue_block.as_ref());
txn.put(Self::pending_activation_key(), buf);
}
pub fn pending_activation<G: Get>(
impl PendingActivationsDb {
pub fn pending_activation<N: Network, G: Get>(
getter: &G,
) -> Option<(<N::Block as Block<N>>::Id, ValidatorSet, KeyPair)> {
if let Some(bytes) = getter.get(Self::pending_activation_key()) {
if let Some(bytes) = getter.get(PendingActivationsDb::key([])) {
if !bytes.is_empty() {
let mut slice = bytes.as_slice();
let (set, key_pair) = <(ValidatorSet, KeyPair)>::decode(&mut slice).unwrap();
@@ -57,7 +29,7 @@ impl<N: Network, D: Db> MainDb<N, D> {
}
None
}
pub fn clear_pending_activation(txn: &mut D::Transaction<'_>) {
txn.put(Self::pending_activation_key(), []);
pub fn clear_pending_activation(txn: &mut impl DbTxn) {
txn.put(Self::key([]), []);
}
}
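
The MainDb::handled_message and handle_message helpers removed above have no replacement in this file; their call sites would presumably move to the generated HandledMessageDb accessors. A hypothetical sketch, not taken from this PR:

// Hypothetical equivalents of the removed MainDb helpers, using the
// macro-generated HandledMessageDb (declared above as a Vec<u8> field).
fn handled_message(getter: &impl Get, id: u64) -> bool {
  HandledMessageDb::get(getter, id.to_le_bytes()).is_some()
}

fn handle_message(txn: &mut impl DbTxn, id: u64) {
  HandledMessageDb::set(txn, id.to_le_bytes(), &Vec::<u8>::new());
}
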
165 changes: 66 additions & 99 deletions processor/src/key_gen.rs
@@ -1,6 +1,6 @@
use core::marker::PhantomData;
use std::collections::HashMap;

use serai_db::create_db;
use zeroize::Zeroizing;

use rand_core::SeedableRng;
@@ -27,104 +27,71 @@ pub struct KeyConfirmed<C: Ciphersuite> {
pub network_keys: ThresholdKeys<C>,
}

#[derive(Clone, Debug)]
struct KeyGenDb<N: Network, D: Db>(PhantomData<D>, PhantomData<N>);
impl<N: Network, D: Db> KeyGenDb<N, D> {
fn key_gen_key(dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
D::key(b"KEY_GEN", dst, key)
}

fn params_key(set: &ValidatorSet) -> Vec<u8> {
Self::key_gen_key(b"params", set.encode())
}
fn save_params(txn: &mut D::Transaction<'_>, set: &ValidatorSet, params: &ThresholdParams) {
txn.put(Self::params_key(set), bincode::serialize(params).unwrap());
}
fn params<G: Get>(getter: &G, set: &ValidatorSet) -> Option<ThresholdParams> {
getter.get(Self::params_key(set)).map(|bytes| bincode::deserialize(&bytes).unwrap())
create_db!(
KeyGenDb {
ParamsDb: ThresholdParams,
CommitmentsDb: HashMap<Participant, Vec<u8>>,
GeneratedKeysDb: Vec<u8>,
KeysDb: Vec<u8>
}
);

#[allow(clippy::type_complexity)]
fn read_keys<N: Network>(
getter: &impl Get,
key: &[u8],
) -> Option<(Vec<u8>, (ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>))> {
let keys_vec = getter.get(key)?;
let mut keys_ref: &[u8] = keys_vec.as_ref();
let substrate_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
let mut network_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
N::tweak_keys(&mut network_keys);
Some((keys_vec, (substrate_keys, network_keys)))
}

// Not scoped to the set since that'd have latter attempts overwrite former
// A former attempt may become the finalized attempt, even if it doesn't in a timely manner
// Overwriting its commitments would be accordingly poor
fn commitments_key(id: &KeyGenId) -> Vec<u8> {
Self::key_gen_key(b"commitments", id.encode())
}
fn save_commitments(
txn: &mut D::Transaction<'_>,
id: &KeyGenId,
commitments: &HashMap<Participant, Vec<u8>>,
) {
txn.put(Self::commitments_key(id), bincode::serialize(commitments).unwrap());
}
fn commitments<G: Get>(getter: &G, id: &KeyGenId) -> HashMap<Participant, Vec<u8>> {
bincode::deserialize::<HashMap<Participant, Vec<u8>>>(
&getter.get(Self::commitments_key(id)).unwrap(),
)
.unwrap()
}
fn confirm_keys<N: Network>(
txn: &mut impl DbTxn,
set: ValidatorSet,
key_pair: KeyPair,
) -> (ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>) {
let val: &[u8] = key_pair.1.as_ref();
let (keys_vec, keys) =
read_keys::<N>(txn, &GeneratedKeysDb::key((set, (&key_pair.0 .0, val)).encode())).unwrap();
assert_eq!(key_pair.0 .0, keys.0.group_key().to_bytes());
assert_eq!(
{
let network_key: &[u8] = key_pair.1.as_ref();
network_key
},
keys.1.group_key().to_bytes().as_ref(),
);
txn.put(KeysDb::key(keys.1.group_key().to_bytes()), keys_vec);
keys
}

fn generated_keys_key(set: ValidatorSet, key_pair: (&[u8; 32], &[u8])) -> Vec<u8> {
Self::key_gen_key(b"generated_keys", (set, key_pair).encode())
}
fn save_keys(
txn: &mut D::Transaction<'_>,
fn keys<N: Network>(
getter: &impl Get,
key: &<N::Curve as Ciphersuite>::G,
) -> Option<(ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>)> {
let res = read_keys::<N>(getter, &KeysDb::key(key.to_bytes()))?.1;
assert_eq!(&res.1.group_key(), key);
Some(res)
}
impl GeneratedKeysDb {
fn save_keys<N: Network>(
txn: &mut impl DbTxn,
id: &KeyGenId,
substrate_keys: &ThresholdCore<Ristretto>,
network_keys: &ThresholdKeys<N::Curve>,
) {
let mut keys = substrate_keys.serialize();
keys.extend(network_keys.serialize().iter());
txn.put(
Self::generated_keys_key(
id.set,
(&substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()),
),
keys,
);
}

fn keys_key(key: &<N::Curve as Ciphersuite>::G) -> Vec<u8> {
Self::key_gen_key(b"keys", key.to_bytes())
}
#[allow(clippy::type_complexity)]
fn read_keys<G: Get>(
getter: &G,
key: &[u8],
) -> Option<(Vec<u8>, (ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>))> {
let keys_vec = getter.get(key)?;
let mut keys_ref: &[u8] = keys_vec.as_ref();
let substrate_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
let mut network_keys = ThresholdKeys::new(ThresholdCore::read(&mut keys_ref).unwrap());
N::tweak_keys(&mut network_keys);
Some((keys_vec, (substrate_keys, network_keys)))
}
fn confirm_keys(
txn: &mut D::Transaction<'_>,
set: ValidatorSet,
key_pair: KeyPair,
) -> (ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>) {
let (keys_vec, keys) =
Self::read_keys(txn, &Self::generated_keys_key(set, (&key_pair.0 .0, key_pair.1.as_ref())))
.unwrap();
assert_eq!(key_pair.0 .0, keys.0.group_key().to_bytes());
assert_eq!(
{
let network_key: &[u8] = key_pair.1.as_ref();
network_key
},
keys.1.group_key().to_bytes().as_ref(),
);
txn.put(Self::keys_key(&keys.1.group_key()), keys_vec);
keys
}
fn keys<G: Get>(
getter: &G,
key: &<N::Curve as Ciphersuite>::G,
) -> Option<(ThresholdKeys<Ristretto>, ThresholdKeys<N::Curve>)> {
let res = Self::read_keys(getter, &Self::keys_key(key))?.1;
assert_eq!(&res.1.group_key(), key);
Some(res)
let key = (
id.set,
(&substrate_keys.group_key().to_bytes(), network_keys.group_key().to_bytes().as_ref()),
)
.encode();
txn.put(Self::key(key), keys);
}
}

@@ -149,7 +116,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {

pub fn in_set(&self, set: &ValidatorSet) -> bool {
// We determine if we're in set using if we have the parameters for a set's key generation
KeyGenDb::<N, D>::params(&self.db, set).is_some()
ParamsDb::get(&self.db, set.encode()).is_some()
}

pub fn keys(
@@ -165,7 +132,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
// The only other concern is if it's set when it's not safe to use
// The keys are only written on confirmation, and the transaction writing them is atomic to
// every associated operation
KeyGenDb::<N, D>::keys(&self.db, key)
keys::<N>(&self.db, key)
}

pub async fn handle(
@@ -207,7 +174,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
self.active_share.remove(&id.set).is_none()
{
// If we haven't handled this set before, save the params
KeyGenDb::<N, D>::save_params(txn, &id.set, &params);
ParamsDb::set(txn, &id.set.encode(), &params);
}

let (machines, commitments) = key_gen_machines(id, params);
@@ -228,7 +195,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
panic!("commitments when already handled commitments");
}

let params = KeyGenDb::<N, D>::params(txn, &id.set).unwrap();
let params = ParamsDb::get(txn, id.set.encode()).unwrap();

// Unwrap the machines, rebuilding them if we didn't have them in our cache
// We won't if the processor rebooted
@@ -288,21 +255,21 @@ impl<N: Network, D: Db> KeyGen<N, D> {
share.extend(network_shares[i].serialize());
}

KeyGenDb::<N, D>::save_commitments(txn, &id, &commitments);
CommitmentsDb::set(txn, &id.encode(), &commitments);

ProcessorMessage::Shares { id, shares }
}

CoordinatorMessage::Shares { id, shares } => {
info!("Received shares for {:?}", id);

let params = KeyGenDb::<N, D>::params(txn, &id.set).unwrap();
let params = ParamsDb::get(txn, id.set.encode()).unwrap();

// Same commentary on inconsistency as above exists
let machines = self.active_share.remove(&id.set).unwrap_or_else(|| {
let machines = key_gen_machines(id, params).0;
let mut rng = secret_shares_rng(id);
let commitments = KeyGenDb::<N, D>::commitments(txn, &id);
let commitments = CommitmentsDb::get(txn, id.encode()).unwrap();

let mut commitments_ref: HashMap<Participant, &[u8]> =
commitments.iter().map(|(i, commitments)| (*i, commitments.as_ref())).collect();
@@ -376,7 +343,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
let mut network_keys = ThresholdKeys::new(network_keys);
N::tweak_keys(&mut network_keys);

KeyGenDb::<N, D>::save_keys(txn, &id, &substrate_keys, &network_keys);
GeneratedKeysDb::save_keys::<N>(txn, &id, &substrate_keys, &network_keys);

ProcessorMessage::GeneratedKeyPair {
id,
@@ -393,7 +360,7 @@ impl<N: Network, D: Db> KeyGen<N, D> {
set: ValidatorSet,
key_pair: KeyPair,
) -> KeyConfirmed<N::Curve> {
let (substrate_keys, network_keys) = KeyGenDb::<N, D>::confirm_keys(txn, set, key_pair);
let (substrate_keys, network_keys) = confirm_keys::<N>(txn, set, key_pair);

info!(
"Confirmed key pair {} {} for set {:?}",