Skip to content

Commit

Permalink
Merge branch 'main' into hybrid_info
Browse files Browse the repository at this point in the history
  • Loading branch information
tyurek authored Dec 3, 2024
2 parents 09f2f8c + dca5be7 commit 4eb6624
Show file tree
Hide file tree
Showing 89 changed files with 3,353 additions and 1,488 deletions.
29 changes: 25 additions & 4 deletions .github/workflows/docker.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ jobs:
- name: "Checkout"
uses: actions/checkout@v4

- name: Docker meta
id: meta
- name: Docker meta helper
id: meta-helper
uses: docker/metadata-action@v5
with:
images: |
Expand All @@ -32,6 +32,17 @@ jobs:
type=ref,event=branch
type=ref,event=pr
type=sha
- name: Docker meta helper
id: meta-rc
uses: docker/metadata-action@v5
with:
images: |
ghcr.io/${{ github.repository }}/rc
tags: |
type=ref,event=branch
type=ref,event=pr
type=sha
- name: "Setup Docker Buildx"
uses: docker/setup-buildx-action@v3
Expand All @@ -51,6 +62,16 @@ jobs:
context: .
file: ./docker/helper.Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
tags: ${{ steps.meta-helper.outputs.tags }}
labels: ${{ steps.meta-helper.outputs.labels }}
platforms: linux/amd64

- name: "Build and Publish Report Collector Image"
uses: docker/build-push-action@v6
with:
context: .
file: ./docker/report_collector.Dockerfile
push: true
tags: ${{ steps.meta-rc.outputs.tags }}
labels: ${{ steps.meta-rc.outputs.labels }}
platforms: linux/amd64
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,12 @@ Note that if you want to build for a specific platform, different than the one y
docker build -t ipa:latest --platform=linux/amd64 -f docker/helper.Dockerfile .
```

The following command is used to build the report collector:

```
docker build -t ipa-rc:latest --platform=linux/amd64 -f docker/report_collector.Dockerfile .
```

### Pushing Docker to ghcr.io

First, follow the instructions [here](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) to get your Token.
Expand Down
2 changes: 1 addition & 1 deletion docker/helper.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,6 @@ ENV HELPER_BIN_PATH=/usr/local/bin/ipa-helper
ENV CONF_DIR=/etc/ipa
ARG SOURCES_DIR

RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y ca-certificates curl procps && rm -rf /var/lib/apt/lists/*

COPY --from=builder ${SOURCES_DIR}/target/release/helper $HELPER_BIN_PATH
20 changes: 20 additions & 0 deletions docker/report_collector.Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# syntax=docker/dockerfile:1
# Multi-stage build for the IPA report collector:
#   stage 1 compiles the binary with the full Rust toolchain,
#   stage 2 ships only the binary on a slim Debian bookworm base.
ARG SOURCES_DIR=/usr/src/ipa
FROM rust:bookworm AS builder
# Re-declare the ARG so the default above is visible inside this stage.
ARG SOURCES_DIR

# Prepare report collector binaries
WORKDIR "$SOURCES_DIR"
COPY . .
RUN set -eux; \
cargo build --bin report_collector --release --no-default-features --features "cli test-fixture web-app real-world-infra compact-gate"

# Copy them to the final image
FROM rust:slim-bookworm
ENV RC_BIN_PATH=/usr/local/bin/report_collector
ENV CONF_DIR=/etc/ipa
# Re-declare the ARG again so the COPY below can reference it in this stage.
ARG SOURCES_DIR

# curl/procps for in-container debugging, ca-certificates for TLS;
# same runtime tool set as helper.Dockerfile for consistency.
RUN apt-get update && apt-get install -y curl procps ca-certificates && rm -rf /var/lib/apt/lists/*

COPY --from=builder ${SOURCES_DIR}/target/release/report_collector $RC_BIN_PATH
3 changes: 2 additions & 1 deletion ipa-core/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -250,7 +250,8 @@ harness = false
required-features = ["enable-benches"]

[[bench]]
name = "dzkp_convert_prover"
name = "dzkp"
path = "benches/ct/dzkp.rs"
harness = false
required-features = ["enable-benches"]

Expand Down
118 changes: 118 additions & 0 deletions ipa-core/benches/ct/dzkp.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
//! Benchmarks for DZKPs.
use std::iter::{repeat_with, zip};

use criterion::{criterion_group, criterion_main, BatchSize, Criterion, SamplingMode};
use futures::{stream::iter, TryStreamExt};
use ipa_core::{
ff::boolean_array::BA256,
helpers::TotalRecords,
protocol::{
basics::BooleanArrayMul,
context::{
dzkp_validator::{DZKPValidator, MultiplicationInputsBlock, TARGET_PROOF_SIZE},
malicious::TEST_DZKP_STEPS,
Context, DZKPUpgradedMaliciousContext, UpgradableContext,
},
RecordId,
},
secret_sharing::{replicated::semi_honest::AdditiveShare as Replicated, SharedValue},
sharding::NotSharded,
test_fixture::{Runner, TestWorld},
utils::non_zero_prev_power_of_two,
};
use rand::{thread_rng, Rng};
use tokio::runtime::Builder;

/// Benchmark covering `MultiplicationInputsBlock::table_indices_prover`
/// (defined in dzkp_field.rs), driven with freshly randomized input blocks.
fn benchmark_table_indices_prover(c: &mut Criterion) {
    let mut bench_group = c.benchmark_group("benches");
    bench_group.bench_function("table_indices_prover", |bencher| {
        bencher.iter_batched_ref(
            // A fresh random block per batch keeps setup cost out of the measurement.
            || thread_rng().gen::<MultiplicationInputsBlock>(),
            |block| block.table_indices_prover(),
            BatchSize::SmallInput,
        )
    });
    bench_group.finish();
}

/// Benchmark for end-to-end proof.
///
/// This benchmark focuses on proof performance by evaluating one of the simplest and
/// most performant MPC circuits possible: 64 million AND gates in parallel.
fn benchmark_proof(c: &mut Criterion) {
    // Dedicated Tokio runtime with three workers — presumably one per helper in the
    // three-party test world; TODO confirm against TestWorld's threading expectations.
    let rt = Builder::new_multi_thread()
        .worker_threads(3)
        .thread_name("helper-worker")
        .enable_time()
        .build()
        .expect("Creating runtime failed");

    type BA = BA256;
    // 64 Mi AND gates packed into 256-bit boolean arrays:
    // COUNT records of BA::BITS gates each.
    const COUNT: usize = 64 * 1024 * 1024 / BA::BITS as usize;

    let mut group = c.benchmark_group("proof");
    // Proof runs are expensive; keep the sample count minimal and use flat
    // sampling so criterion does not scale iteration counts per sample.
    group.sample_size(10);
    group.sampling_mode(SamplingMode::Flat);
    group.bench_function("proof", |b| {
        b.to_async(&rt).iter_batched(
            // Setup (unmeasured): generate the random left/right gate inputs.
            || {
                let mut rng = thread_rng();

                let a = repeat_with(|| rng.gen()).take(COUNT).collect::<Vec<BA>>();
                let b = repeat_with(|| rng.gen()).take(COUNT).collect::<Vec<BA>>();

                (a, b)
            },
            // Measured routine: run the malicious-security protocol end to end,
            // including DZKP validation, over all COUNT records.
            |(a, b): (Vec<BA>, Vec<BA>)| async move {
                TestWorld::default()
                    .malicious((a.into_iter(), b.into_iter()), |ctx, (a, b)| async move {
                        // Largest power of two that keeps a batch's multiplications
                        // within TARGET_PROOF_SIZE.
                        let batch_size = non_zero_prev_power_of_two(
                            TARGET_PROOF_SIZE / usize::try_from(BA::BITS).unwrap(),
                        );
                        let v = ctx
                            .set_total_records(TotalRecords::specified(COUNT)?)
                            .dzkp_validator(TEST_DZKP_STEPS, batch_size);
                        let m_ctx = v.context();

                        // One vectorized boolean multiplication (AND) per record,
                        // joined under the validator so proofs cover every batch.
                        v.validated_seq_join(iter(zip(a, b).enumerate().map(
                            |(i, (a_malicious, b_malicious))| {
                                let m_ctx = m_ctx.clone();
                                let a_vec = <Replicated<BA> as BooleanArrayMul<
                                    DZKPUpgradedMaliciousContext<NotSharded>,
                                >>::Vectorized::from(
                                    a_malicious
                                );
                                let b_vec = <Replicated<BA> as BooleanArrayMul<
                                    DZKPUpgradedMaliciousContext<NotSharded>,
                                >>::Vectorized::from(
                                    b_malicious
                                );
                                async move {
                                    <Replicated<BA> as BooleanArrayMul<_>>::multiply(
                                        m_ctx,
                                        RecordId::from(i),
                                        &a_vec,
                                        &b_vec,
                                    )
                                    .await
                                    .map(<Replicated<BA>>::from)
                                }
                            },
                        )))
                        .try_collect::<Vec<_>>()
                        .await
                    })
                    .await
                    // Panic on protocol failure; the benchmark result itself is discarded.
                    .map(Result::unwrap);
            },
            // Inputs are large (64 Mi gates); generate them per iteration instead of
            // holding many pre-built batches in memory at once.
            BatchSize::PerIteration,
        )
    });
    group.finish();
}

// Register both benchmark groups and generate the criterion harness entry point.
criterion_group!(benches, benchmark_table_indices_prover);
criterion_group!(proof, benchmark_proof);
criterion_main!(benches, proof);
20 changes: 0 additions & 20 deletions ipa-core/benches/dzkp_convert_prover.rs

This file was deleted.

1 change: 1 addition & 0 deletions ipa-core/benches/oneshot/ipa.rs
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ async fn run(args: Args) -> Result<(), Error> {
..Default::default()
},
initial_gate: Some(Gate::default().narrow(&IpaPrf)),
timeout: None,
..TestWorldConfig::default()
};
// Construct TestWorld early to initialize logging.
Expand Down
28 changes: 18 additions & 10 deletions ipa-core/src/bin/helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,11 @@ use futures::future::join;
use hyper::http::uri::Scheme;
use ipa_core::{
cli::{
client_config_setup, keygen, test_setup, ConfGenArgs, KeygenArgs, LoggingHandle,
TestSetupArgs, Verbosity,
},
config::{
hpke_registry, sharded_server_from_toml_str, HpkeServerConfig, ServerConfig, TlsConfig,
client_config_setup, keygen, sharded_client_config_setup, sharded_server_from_toml_str,
test_setup, ConfGenArgs, KeygenArgs, LoggingHandle, ShardedConfGenArgs, TestSetupArgs,
Verbosity,
},
config::{hpke_registry, HpkeServerConfig, ServerConfig, TlsConfig},
error::BoxError,
executor::IpaRuntime,
helpers::HelperIdentity,
Expand Down Expand Up @@ -61,18 +60,18 @@ struct ServerArgs {
#[arg(short, long, required = true)]
identity: Option<usize>,

#[arg(default_value = "0")]
#[arg(long, default_value = "0")]
shard_index: Option<u32>,

#[arg(default_value = "1")]
#[arg(long, default_value = "1")]
shard_count: Option<u32>,

/// Port to listen on
#[arg(short, long, default_value = "3000")]
port: Option<u16>,

/// Port to use for shard-to-shard communication, if sharded MPC is used
#[arg(default_value = "6000")]
#[arg(long, default_value = "6000")]
shard_port: Option<u16>,

/// Use the supplied prebound socket instead of binding a new socket for mpc
Expand Down Expand Up @@ -123,6 +122,7 @@ struct ServerArgs {

#[derive(Debug, Subcommand)]
enum HelperCommand {
ShardedConfgen(ShardedConfGenArgs),
Confgen(ConfGenArgs),
Keygen(KeygenArgs),
TestSetup(TestSetupArgs),
Expand Down Expand Up @@ -182,8 +182,15 @@ async fn server(args: ServerArgs, logging_handle: LoggingHandle) -> Result<(), B
let my_identity = HelperIdentity::try_from(args.identity.expect("enforced by clap")).unwrap();
let shard_index = ShardIndex::from(args.shard_index.expect("enforced by clap"));
let shard_count = ShardIndex::from(args.shard_count.expect("enforced by clap"));
assert!(shard_index < shard_count);
assert_eq!(args.tls_cert.is_some(), !args.disable_https);
assert!(
shard_index < shard_count,
"Shard index should be lower than shard count"
);
assert_eq!(
args.tls_cert.is_some(),
!args.disable_https,
"Inconsistent configuration: TLS certs and disable_http"
);

let (identity, server_tls) =
create_client_identity(my_identity, args.tls_cert.clone(), args.tls_key.clone())?;
Expand Down Expand Up @@ -366,6 +373,7 @@ pub async fn main() {
Some(HelperCommand::Keygen(args)) => keygen(&args),
Some(HelperCommand::TestSetup(args)) => test_setup(args),
Some(HelperCommand::Confgen(args)) => client_config_setup(args),
Some(HelperCommand::ShardedConfgen(args)) => sharded_client_config_setup(args),
};

if let Err(e) = res {
Expand Down
53 changes: 44 additions & 9 deletions ipa-core/src/bin/test_mpc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,18 @@ use generic_array::ArrayLength;
use hyper::http::uri::Scheme;
use ipa_core::{
cli::{
playbook::{make_clients, secure_add, secure_mul, validate, InputSource},
playbook::{
make_clients, make_sharded_clients, secure_add, secure_mul, secure_shuffle, validate,
InputSource,
},
Verbosity,
},
ff::{Field, FieldType, Fp31, Fp32BitPrime, Serializable, U128Conversions},
ff::{
boolean_array::BA64, Field, FieldType, Fp31, Fp32BitPrime, Serializable, U128Conversions,
},
helpers::query::{
QueryConfig,
QueryType::{TestAddInPrimeField, TestMultiply},
QueryType::{TestAddInPrimeField, TestMultiply, TestShardedShuffle},
},
net::{Helper, IpaHttpClient},
secret_sharing::{replicated::semi_honest::AdditiveShare, IntoShares},
Expand Down Expand Up @@ -103,11 +108,27 @@ async fn main() -> Result<(), Box<dyn Error>> {
Scheme::HTTPS
};

let (clients, _) = make_clients(args.network.as_deref(), scheme, args.wait).await;
match args.action {
TestAction::Multiply => multiply(&args, &clients).await,
TestAction::AddInPrimeField => add(&args, &clients).await,
TestAction::ShardedShuffle => sharded_shuffle(&args, &clients).await,
TestAction::Multiply => {
let (clients, _) = make_clients(args.network.as_deref(), scheme, args.wait).await;
multiply(&args, &clients).await
}
TestAction::AddInPrimeField => {
let (clients, _) = make_clients(args.network.as_deref(), scheme, args.wait).await;
add(&args, &clients).await
}
TestAction::ShardedShuffle => {
// we need clients to talk to each individual shard
let clients = make_sharded_clients(
args.network
.as_deref()
.expect("network config is required for sharded shuffle"),
scheme,
args.wait,
)
.await;
sharded_shuffle(&args, clients).await
}
};

Ok(())
Expand Down Expand Up @@ -166,6 +187,20 @@ async fn add(args: &Args, helper_clients: &[IpaHttpClient<Helper>; 3]) {
};
}

async fn sharded_shuffle(_args: &Args, _helper_clients: &[IpaHttpClient<Helper>; 3]) {
unimplemented!()
/// Runs a sharded-shuffle test query and sanity-checks the result.
///
/// `helper_clients` holds one trio of helper clients per shard.
async fn sharded_shuffle(args: &Args, helper_clients: Vec<[IpaHttpClient<Helper>; 3]>) {
    let input = InputSource::from(&args.input);
    // Parse input as u64 values and truncate each into a 64-bit boolean array share type.
    let input_rows = input
        .iter::<u64>()
        .map(BA64::truncate_from)
        .collect::<Vec<_>>();
    let query_config =
        QueryConfig::new(TestShardedShuffle, args.input.field, input_rows.len()).unwrap();
    // NOTE(review): the query is created against shard 0 / helper 1 — presumably the
    // coordinator for sharded queries; confirm against the query-creation API contract.
    let query_id = helper_clients[0][0]
        .create_query(query_config)
        .await
        .unwrap();
    let shuffled = secure_shuffle(input_rows.clone(), &helper_clients, query_id).await;

    // A shuffle is a permutation (lengths match) and, with overwhelming probability
    // for non-trivial inputs, changes the order relative to the original.
    assert_eq!(shuffled.len(), input_rows.len());
    assert_ne!(shuffled, input_rows);
}
Loading

0 comments on commit 4eb6624

Please sign in to comment.