From cdfd365eaca4c6ad6710bb453f70da53088a1350 Mon Sep 17 00:00:00 2001 From: Andrea Lattuada Date: Thu, 15 Aug 2024 14:06:03 +0200 Subject: [PATCH] Import macrobenchmark statistics evaluation automation Co-authored-by: Andrea Lattuada Co-authored-by: Jay Lorch Co-authored-by: Jon Howell Co-authored-by: Hayley LeBlanc Co-authored-by: Travis Hance --- macro-stats/.gitignore | 2 + macro-stats/README.md | 5 + macro-stats/dafny.sh | 66 +++ macro-stats/ironsht_files.txt | 48 +++ macro-stats/ironsht_files_nonlinear.txt | 2 + macro-stats/lib.sh | 32 ++ macro-stats/linear-dafny-early.sh | 26 ++ macro-stats/linear-dafny-encoding-stats.txt | 65 +++ macro-stats/linear-dafny-encoding-wc-c.txt | 65 +++ macro-stats/linear-dafny.sh | 45 ++ macro-stats/monster-table-generator.py | 159 +++++++ macro-stats/run.sh | 24 ++ macro-stats/summarize.sh | 21 + macro-stats/summarize/.gitignore | 1 + macro-stats/summarize/Cargo.lock | 303 ++++++++++++++ macro-stats/summarize/Cargo.toml | 9 + macro-stats/summarize/encoding_bytes.sh | 11 + macro-stats/summarize/src/main.rs | 412 +++++++++++++++++++ macro-stats/time.py | 18 + macro-stats/util/entry-dafny.sh | 8 + macro-stats/util/entry-linear-dafny-early.sh | 2 + macro-stats/util/entry-linear-dafny.sh | 2 + macro-stats/util/entry-summarize.sh | 8 + macro-stats/util/entry-verus-all-triggers.sh | 13 + macro-stats/util/entry-verus.sh | 13 + macro-stats/util/entry.dockerfile | 6 + macro-stats/verus.sh | 147 +++++++ 27 files changed, 1513 insertions(+) create mode 100644 macro-stats/.gitignore create mode 100644 macro-stats/README.md create mode 100644 macro-stats/dafny.sh create mode 100644 macro-stats/ironsht_files.txt create mode 100644 macro-stats/ironsht_files_nonlinear.txt create mode 100644 macro-stats/lib.sh create mode 100644 macro-stats/linear-dafny-early.sh create mode 100644 macro-stats/linear-dafny-encoding-stats.txt create mode 100644 macro-stats/linear-dafny-encoding-wc-c.txt create mode 100644 macro-stats/linear-dafny.sh create mode 100755 macro-stats/monster-table-generator.py create mode 100644 macro-stats/run.sh create mode 100644 macro-stats/summarize.sh create mode 100644 macro-stats/summarize/.gitignore create mode 100644 macro-stats/summarize/Cargo.lock create mode 100644 macro-stats/summarize/Cargo.toml create mode 100644 macro-stats/summarize/encoding_bytes.sh create mode 100644 macro-stats/summarize/src/main.rs create mode 100644 macro-stats/time.py create mode 100644 macro-stats/util/entry-dafny.sh create mode 100644 macro-stats/util/entry-linear-dafny-early.sh create mode 100644 macro-stats/util/entry-linear-dafny.sh create mode 100644 macro-stats/util/entry-summarize.sh create mode 100644 macro-stats/util/entry-verus-all-triggers.sh create mode 100644 macro-stats/util/entry-verus.sh create mode 100644 macro-stats/util/entry.dockerfile create mode 100644 macro-stats/verus.sh diff --git a/macro-stats/.gitignore b/macro-stats/.gitignore new file mode 100644 index 0000000..f8069a1 --- /dev/null +++ b/macro-stats/.gitignore @@ -0,0 +1,2 @@ +/repos/ +/results/ diff --git a/macro-stats/README.md b/macro-stats/README.md new file mode 100644 index 0000000..03d7564 --- /dev/null +++ b/macro-stats/README.md @@ -0,0 +1,5 @@ + +Install docker, run `bash run.sh`. + +Produces results in `results/`, in particular `results.json`, `results-latex-commands.tex`, `results-latex-table.tex`. + diff --git a/macro-stats/dafny.sh b/macro-stats/dafny.sh new file mode 100644 index 0000000..e5a09c9 --- /dev/null +++ b/macro-stats/dafny.sh @@ -0,0 +1,66 @@ +#! 
/bin/bash
+
+# unzip libicu-dev libgomp1 python3
+
+DAFNY_THREADS=8
+
+VERIFY_IRONSHT=1
+
+set -e
+set -x
+
+. lib.sh
+
+if [ $# -lt 1 ]; then
+    echo "usage: $0 <results-dir>"
+    exit 1
+fi
+
+RESULTS_DIR=$1
+
+print_header "setting up repos/"
+
+if [ ! -d "repos" ]; then
+    mkdir repos
+fi
+
+print_header "cloning or updating repositories"
+
+clone_and_update_repository "ironclad" "main" "https://github.com/microsoft/Ironclad.git"
+
+(cd repos;
+    if [ ! -d "dafny-3.4.0" ]; then
+        mkdir dafny-3.4.0
+        cd dafny-3.4.0
+        curl -LO https://github.com/dafny-lang/dafny/releases/download/v3.4.0/dafny-3.4.0-x64-ubuntu-16.04.zip
+        unzip dafny-3.4.0-x64-ubuntu-16.04.zip
+        rm dafny-3.4.0-x64-ubuntu-16.04.zip
+    fi)
+
+print_header "verifying"
+
+cd $RESULTS_DIR
+
+echo $DAFNY_THREADS > dafny-num-threads.txt
+
+if [ $VERIFY_IRONSHT -eq 1 ]; then
+    print_step "verifying ironsht"
+
+    mkdir -p ironsht && cd ironsht
+
+    DAFNY_EXE=../../repos/dafny-3.4.0/dafny/dafny
+
+    IRONSHT_FILES=$(sed -e 's/^/..\/..\/repos\/ironclad\//' ../../ironsht_files.txt | tr '\n' ' ' | tr -d '\r')
+    IRONSHT_NONLINEAR_FILES=$(sed -e 's/^/..\/..\/repos\/ironclad\//' ../../ironsht_files_nonlinear.txt | tr '\n' ' ' | tr -d '\r')
+
+    python3 ../../time.py dafny-verification-parallel.output.txt dafny-verification-parallel.time.txt \
+        $DAFNY_EXE /compile:0 /arith:5 /noCheating:1 /trace /vcsCores:$DAFNY_THREADS $IRONSHT_FILES
+    python3 ../../time.py dafny-verification-parallel-nonlinear.output.txt dafny-verification-parallel-nonlinear.time.txt \
+        $DAFNY_EXE /compile:0 /arith:2 /noCheating:1 /trace /vcsCores:$DAFNY_THREADS $IRONSHT_NONLINEAR_FILES
+    python3 ../../time.py dafny-verification-singlethread.output.txt dafny-verification-singlethread.time.txt \
+        $DAFNY_EXE /compile:0 /arith:5 /noCheating:1 /trace /vcsCores:1 $IRONSHT_FILES
+    python3 ../../time.py dafny-verification-singlethread-nonlinear.output.txt dafny-verification-singlethread-nonlinear.time.txt \
+        $DAFNY_EXE /compile:0 /arith:2 /noCheating:1 /trace /vcsCores:1 $IRONSHT_NONLINEAR_FILES
+
+    cd ..
+fi diff --git a/macro-stats/ironsht_files.txt b/macro-stats/ironsht_files.txt new file mode 100644 index 0000000..293803b --- /dev/null +++ b/macro-stats/ironsht_files.txt @@ -0,0 +1,48 @@ +ironfleet/src/Dafny/Distributed/Common/Collections/Maps2.s.dfy +ironfleet/src/Dafny/Distributed/Common/Framework/Environment.s.dfy +ironfleet/src/Dafny/Distributed/Common/Framework/Host.s.dfy +ironfleet/src/Dafny/Distributed/Common/Native/Io.s.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mod_auto.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mod_auto_proofs.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mul.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mul_auto.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mul_auto_proofs.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../Common/Collections/Seqs.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../Common/CmdLineParser.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../Common/NetClient.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/CmdLineParser.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/Host.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/NetSHT.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/SchedulerImpl.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/SchedulerModel.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/Unsendable.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/../../Common/Logic/Option.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/../../Common/Native/NativeTypes.s.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/../Common/GenericMarshalling.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/../Common/GenericRefinement.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/../Common/NodeIdentity.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/AppInterface.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/AppInterfaceConcrete.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/CMessage.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/ConstantsState.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/DelegationLookup.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/Delegations.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/HostModel.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/HostState.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/PacketParsing.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/Parameters.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/SHTConcreteConfiguration.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/SingleDeliveryModel.i.dfy +ironfleet/src/Dafny/Distributed/Impl/SHT/SingleDeliveryState.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/LiveSHT/Scheduler.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Configuration.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Delegations.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Host.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Keys.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Message.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Network.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/Parameters.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/SingleDelivery.i.dfy +ironfleet/src/Dafny/Distributed/Protocol/SHT/SingleMessage.i.dfy +ironfleet/src/Dafny/Distributed/Services/SHT/Main.i.dfy +ironfleet/src/Dafny/Distributed/Services/SHT/Marshall.i.dfy diff --git a/macro-stats/ironsht_files_nonlinear.txt b/macro-stats/ironsht_files_nonlinear.txt new file mode 100644 index 
0000000..908109f --- /dev/null +++ b/macro-stats/ironsht_files_nonlinear.txt @@ -0,0 +1,2 @@ +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/mul_nonlinear.i.dfy +ironfleet/src/Dafny/Distributed/Impl/LiveSHT/../../../Libraries/Math/div_nonlinear.i.dfy diff --git a/macro-stats/lib.sh b/macro-stats/lib.sh new file mode 100644 index 0000000..5483c3c --- /dev/null +++ b/macro-stats/lib.sh @@ -0,0 +1,32 @@ + +print_header() { + local message=$1 + echo -e "\033[32m■ $message\033[0m" +} + +print_step() { + local message=$1 + echo -e "\033[34m ■ $message\033[0m" +} + +clone_and_update_repository() { + print_step "cloning or updating $1" + + cd repos + + local repo_name=$1 + local branch=$2 + local repo_url=$3 + local repo_path="$repo_name" + + if [ ! -d "$repo_path" ]; then + GIT_SSH_COMMAND="ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" \ + git clone -b $branch --single-branch --depth 1 $repo_url $repo_path + fi + (cd $repo_path; + GIT_SSH_COMMAND="ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null" \ + git pull --ff-only) + + cd .. # repos +} + diff --git a/macro-stats/linear-dafny-early.sh b/macro-stats/linear-dafny-early.sh new file mode 100644 index 0000000..a78220f --- /dev/null +++ b/macro-stats/linear-dafny-early.sh @@ -0,0 +1,26 @@ +#! /bin/bash + +DAFNY_THREADS=8 + +set -e +set -x + +. lib.sh + +if [ $# -lt 1 ]; then + echo "usage: $0 " + exit 1 +fi + +RESULTS_DIR=$1 + +print_header "setting up repos/" + +if [ ! -d "repos" ]; then + mkdir repos +fi + +print_header "cloning or updating repositories" + +clone_and_update_repository "ironsync" "osdi2023-artifact" "git@github.com:secure-foundations/ironsync-osdi2023.git" + diff --git a/macro-stats/linear-dafny-encoding-stats.txt b/macro-stats/linear-dafny-encoding-stats.txt new file mode 100644 index 0000000..fc4dd2e --- /dev/null +++ b/macro-stats/linear-dafny-encoding-stats.txt @@ -0,0 +1,65 @@ +total 4407904 +-rw-r--r-- 1 andreal1 staff 57297145 Dec 4 14:27 log-2023-12-04T13.27.28.5706360+00.00-concurrency-node-replication-Interface.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2059208 Dec 4 14:27 log-2023-12-04T13.27.48.8646435+00.00-concurrency-node-replication-Interface.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 953440 Dec 4 14:27 log-2023-12-04T13.27.52.8481608+00.00-concurrency-node-replication-NRSpec.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 1569107 Dec 4 14:27 log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 1633504 Dec 4 14:28 log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 1318875 Dec 4 14:28 log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2.2 +-rw-r--r-- 1 andreal1 staff 462611 Dec 4 14:28 log-2023-12-04T13.28.01.1769232+00.00-concurrency-node-replication-Linearize.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 669652 Dec 4 14:28 log-2023-12-04T13.28.01.1769232+00.00-concurrency-node-replication-Linearize.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 1204608 Dec 4 14:28 log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 1943380 Dec 4 14:28 log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 9996892 Dec 4 14:28 log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2.2 +-rw-r--r-- 1 andreal1 staff 449020 Dec 4 14:28 log-2023-12-04T13.28.12.0310287+00.00-concurrency-framework-PCM.s.dfy.smt2 
+-rw-r--r-- 1 andreal1 staff 2916518 Dec 4 14:28 log-2023-12-04T13.28.12.0310287+00.00-concurrency-framework-PCM.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 10774017 Dec 4 14:28 log-2023-12-04T13.28.16.2144152+00.00-lib-Lang-LinearSequence.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 5760789 Dec 4 14:28 log-2023-12-04T13.28.21.4328978+00.00-lib-Lang-NativeTypes.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2269925 Dec 4 14:28 log-2023-12-04T13.28.25.8366585+00.00-lib-Lang-LinearMaybe.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 77624752 Dec 4 14:28 log-2023-12-04T13.28.29.4444561+00.00-concurrency-node-replication-Init.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 3943038 Dec 4 14:29 log-2023-12-04T13.28.55.6496438+00.00-concurrency-node-replication-Impl.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 203887529 Dec 4 14:29 log-2023-12-04T13.28.55.6496438+00.00-concurrency-node-replication-Impl.i.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 177498943 Dec 4 14:32 log-2023-12-04T13.29.46.8144611+00.00-concurrency-node-replication-InfiniteLogTokens.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 303962692 Dec 4 14:34 log-2023-12-04T13.32.17.3468999+00.00-concurrency-node-replication-InfiniteLog.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 10332790 Dec 4 14:34 log-2023-12-04T13.34.49.4593714+00.00-concurrency-framework-AsyncSSM.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 715368 Dec 4 14:34 log-2023-12-04T13.34.54.8048547+00.00-lib-Base-Option.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2464250 Dec 4 14:35 log-2023-12-04T13.34.57.7347808+00.00-concurrency-node-replication-Constants.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 671304 Dec 4 14:35 log-2023-12-04T13.35.01.0773862+00.00-concurrency-framework-GlinearMap.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 6803557 Dec 4 14:35 log-2023-12-04T13.35.03.8988362+00.00-concurrency-framework-Ptrs.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 719446 Dec 4 14:35 log-2023-12-04T13.35.08.2318001+00.00-concurrency-framework-GlinearOption.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 49393561 Dec 4 14:35 log-2023-12-04T13.35.11.4975346+00.00-lib-Lang-LinearSequence.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 25209698 Dec 4 14:35 log-2023-12-04T13.35.23.7867456+00.00-concurrency-node-replication-rwlock-Impl.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2512752 Dec 4 14:35 log-2023-12-04T13.35.33.3077550+00.00-concurrency-framework-Atomic.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2644872 Dec 4 14:35 log-2023-12-04T13.35.33.3077550+00.00-concurrency-framework-Atomic.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 714994 Dec 4 14:35 log-2023-12-04T13.35.37.8969898+00.00-concurrency-framework-Cells.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 348351 Dec 4 14:35 log-2023-12-04T13.35.40.8770008+00.00-concurrency-node-replication-Runtime.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 36868986 Dec 4 14:35 log-2023-12-04T13.35.45.5833056+00.00-concurrency-node-replication-rwlock-RwLock.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 33457375 Dec 4 14:36 log-2023-12-04T13.35.45.5833056+00.00-concurrency-node-replication-rwlock-RwLock.i.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 3258600 Dec 4 14:36 log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 3437851 Dec 4 14:36 log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 9118089 Dec 4 14:36 log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.2 +-rw-r--r-- 1 andreal1 staff 21598593 Dec 4 14:36 log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.3 +-rw-r--r-- 1 andreal1 staff 512073 Dec 4 14:36 
log-2023-12-04T13.36.16.9074371+00.00-concurrency-framework-PCMExt.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2355472 Dec 4 14:36 log-2023-12-04T13.36.16.9074371+00.00-concurrency-framework-PCMExt.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 4421271 Dec 4 14:36 log-2023-12-04T13.36.21.0021382+00.00-concurrency-framework-PCMWrap.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2045677 Dec 4 14:36 log-2023-12-04T13.36.21.0021382+00.00-concurrency-framework-PCMWrap.s.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 9751665 Dec 4 14:36 log-2023-12-04T13.36.25.8294379+00.00-concurrency-scache-rwlock-FullMap.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 2450644 Dec 4 14:36 log-2023-12-04T13.36.30.9493731+00.00-concurrency-scache-rwlock-MapSum.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 4241652 Dec 4 14:36 log-2023-12-04T13.36.36.9273297+00.00-concurrency-scache-ClientCounter.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 7604270 Dec 4 14:36 log-2023-12-04T13.36.36.9273297+00.00-concurrency-scache-ClientCounter.i.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 1372031 Dec 4 14:36 log-2023-12-04T13.36.42.5543811+00.00-concurrency-framework-BasicPCM.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 223401764 Dec 4 14:37 log-2023-12-04T13.36.46.1241424+00.00-concurrency-node-replication-CyclicBufferTokens.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 270044583 Dec 4 14:39 log-2023-12-04T13.37.50.9990527+00.00-concurrency-node-replication-CyclicBuffer.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 4348458 Dec 4 14:39 log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 3879802 Dec 4 14:39 log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.1 +-rw-r--r-- 1 andreal1 staff 17266919 Dec 4 14:39 log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.2 +-rw-r--r-- 1 andreal1 staff 37694895 Dec 4 14:39 log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.3 +-rw-r--r-- 1 andreal1 staff 37737165 Dec 4 14:39 log-2023-12-04T13.39.48.0049473+00.00-lib-Base-Multisets.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 21588617 Dec 4 14:40 log-2023-12-04T13.39.59.3772034+00.00-lib-Base-Maps.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 343879 Dec 4 14:40 log-2023-12-04T13.40.06.4714029+00.00-lib-Base-MapRemove.s.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 92099958 Dec 4 14:40 log-2023-12-04T13.40.09.5477358+00.00-lib-Base-sequences.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 1318589 Dec 4 14:40 log-2023-12-04T13.40.37.4333509+00.00-lib-Base-mathematics.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 60608306 Dec 4 14:40 log-2023-12-04T13.40.41.1967269+00.00-concurrency-node-replication-FlatCombinerTokens.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 55116219 Dec 4 14:41 log-2023-12-04T13.40.57.0237983+00.00-concurrency-node-replication-FlatCombiner.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 167790118 Dec 4 14:43 log-2023-12-04T13.41.56.3070071+00.00-concurrency-node-replication-InfiniteLog_Refines_NRSimple.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 22366777 Dec 4 14:43 log-2023-12-04T13.43.25.2015735+00.00-concurrency-node-replication-NRSimple.i.dfy.smt2 +-rw-r--r-- 1 andreal1 staff 33915255 Dec 4 14:43 log-2023-12-04T13.43.32.8322910+00.00-concurrency-node-replication-Linearize.i.dfy.smt2 diff --git a/macro-stats/linear-dafny-encoding-wc-c.txt b/macro-stats/linear-dafny-encoding-wc-c.txt new file mode 100644 index 0000000..ecd733c --- /dev/null +++ b/macro-stats/linear-dafny-encoding-wc-c.txt @@ -0,0 +1,65 @@ +c 57297145 
results/nr/linear-dafny-encoding/log-2023-12-04T13.27.28.5706360+00.00-concurrency-node-replication-Interface.i.dfy.smt2 + 2059208 results/nr/linear-dafny-encoding/log-2023-12-04T13.27.48.8646435+00.00-concurrency-node-replication-Interface.s.dfy.smt2 + 953440 results/nr/linear-dafny-encoding/log-2023-12-04T13.27.52.8481608+00.00-concurrency-node-replication-NRSpec.s.dfy.smt2 + 1569107 results/nr/linear-dafny-encoding/log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2 + 1633504 results/nr/linear-dafny-encoding/log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2.1 + 1318875 results/nr/linear-dafny-encoding/log-2023-12-04T13.27.56.0336421+00.00-concurrency-framework-StateMachines.s.dfy.smt2.2 + 462611 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.01.1769232+00.00-concurrency-node-replication-Linearize.s.dfy.smt2 + 669652 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.01.1769232+00.00-concurrency-node-replication-Linearize.s.dfy.smt2.1 + 1204608 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2 + 1943380 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2.1 + 9996892 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.05.1551617+00.00-concurrency-framework-AsyncSSM.s.dfy.smt2.2 + 449020 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.12.0310287+00.00-concurrency-framework-PCM.s.dfy.smt2 + 2916518 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.12.0310287+00.00-concurrency-framework-PCM.s.dfy.smt2.1 + 10774017 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.16.2144152+00.00-lib-Lang-LinearSequence.s.dfy.smt2 + 5760789 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.21.4328978+00.00-lib-Lang-NativeTypes.s.dfy.smt2 + 2269925 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.25.8366585+00.00-lib-Lang-LinearMaybe.s.dfy.smt2 + 77624752 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.29.4444561+00.00-concurrency-node-replication-Init.i.dfy.smt2 + 3943038 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.55.6496438+00.00-concurrency-node-replication-Impl.i.dfy.smt2 + 203887529 results/nr/linear-dafny-encoding/log-2023-12-04T13.28.55.6496438+00.00-concurrency-node-replication-Impl.i.dfy.smt2.1 + 177498943 results/nr/linear-dafny-encoding/log-2023-12-04T13.29.46.8144611+00.00-concurrency-node-replication-InfiniteLogTokens.i.dfy.smt2 + 303962692 results/nr/linear-dafny-encoding/log-2023-12-04T13.32.17.3468999+00.00-concurrency-node-replication-InfiniteLog.i.dfy.smt2 + 10332790 results/nr/linear-dafny-encoding/log-2023-12-04T13.34.49.4593714+00.00-concurrency-framework-AsyncSSM.i.dfy.smt2 + 715368 results/nr/linear-dafny-encoding/log-2023-12-04T13.34.54.8048547+00.00-lib-Base-Option.s.dfy.smt2 + 2464250 results/nr/linear-dafny-encoding/log-2023-12-04T13.34.57.7347808+00.00-concurrency-node-replication-Constants.i.dfy.smt2 + 671304 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.01.0773862+00.00-concurrency-framework-GlinearMap.s.dfy.smt2 + 6803557 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.03.8988362+00.00-concurrency-framework-Ptrs.s.dfy.smt2 + 719446 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.08.2318001+00.00-concurrency-framework-GlinearOption.s.dfy.smt2 + 49393561 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.11.4975346+00.00-lib-Lang-LinearSequence.i.dfy.smt2 + 25209698 
results/nr/linear-dafny-encoding/log-2023-12-04T13.35.23.7867456+00.00-concurrency-node-replication-rwlock-Impl.i.dfy.smt2 + 2512752 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.33.3077550+00.00-concurrency-framework-Atomic.s.dfy.smt2 + 2644872 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.33.3077550+00.00-concurrency-framework-Atomic.s.dfy.smt2.1 + 714994 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.37.8969898+00.00-concurrency-framework-Cells.s.dfy.smt2 + 348351 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.40.8770008+00.00-concurrency-node-replication-Runtime.s.dfy.smt2 + 36868986 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.45.5833056+00.00-concurrency-node-replication-rwlock-RwLock.i.dfy.smt2 + 33457375 results/nr/linear-dafny-encoding/log-2023-12-04T13.35.45.5833056+00.00-concurrency-node-replication-rwlock-RwLock.i.dfy.smt2.1 + 3258600 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2 + 3437851 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.1 + 9118089 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.2 + 21598593 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.05.7455203+00.00-concurrency-framework-Rw.i.dfy.smt2.3 + 512073 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.16.9074371+00.00-concurrency-framework-PCMExt.s.dfy.smt2 + 2355472 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.16.9074371+00.00-concurrency-framework-PCMExt.s.dfy.smt2.1 + 4421271 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.21.0021382+00.00-concurrency-framework-PCMWrap.s.dfy.smt2 + 2045677 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.21.0021382+00.00-concurrency-framework-PCMWrap.s.dfy.smt2.1 + 9751665 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.25.8294379+00.00-concurrency-scache-rwlock-FullMap.i.dfy.smt2 + 2450644 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.30.9493731+00.00-concurrency-scache-rwlock-MapSum.i.dfy.smt2 + 4241652 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.36.9273297+00.00-concurrency-scache-ClientCounter.i.dfy.smt2 + 7604270 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.36.9273297+00.00-concurrency-scache-ClientCounter.i.dfy.smt2.1 + 1372031 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.42.5543811+00.00-concurrency-framework-BasicPCM.i.dfy.smt2 + 223401764 results/nr/linear-dafny-encoding/log-2023-12-04T13.36.46.1241424+00.00-concurrency-node-replication-CyclicBufferTokens.i.dfy.smt2 + 270044583 results/nr/linear-dafny-encoding/log-2023-12-04T13.37.50.9990527+00.00-concurrency-node-replication-CyclicBuffer.i.dfy.smt2 + 4348458 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2 + 3879802 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.1 + 17266919 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.2 + 37694895 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.31.7607858+00.00-concurrency-framework-MultiRw.i.dfy.smt2.3 + 37737165 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.48.0049473+00.00-lib-Base-Multisets.i.dfy.smt2 + 21588617 results/nr/linear-dafny-encoding/log-2023-12-04T13.39.59.3772034+00.00-lib-Base-Maps.i.dfy.smt2 + 343879 
results/nr/linear-dafny-encoding/log-2023-12-04T13.40.06.4714029+00.00-lib-Base-MapRemove.s.dfy.smt2 + 92099958 results/nr/linear-dafny-encoding/log-2023-12-04T13.40.09.5477358+00.00-lib-Base-sequences.i.dfy.smt2 + 1318589 results/nr/linear-dafny-encoding/log-2023-12-04T13.40.37.4333509+00.00-lib-Base-mathematics.i.dfy.smt2 + 60608306 results/nr/linear-dafny-encoding/log-2023-12-04T13.40.41.1967269+00.00-concurrency-node-replication-FlatCombinerTokens.i.dfy.smt2 + 55116219 results/nr/linear-dafny-encoding/log-2023-12-04T13.40.57.0237983+00.00-concurrency-node-replication-FlatCombiner.i.dfy.smt2 + 167790118 results/nr/linear-dafny-encoding/log-2023-12-04T13.41.56.3070071+00.00-concurrency-node-replication-InfiniteLog_Refines_NRSimple.i.dfy.smt2 + 22366777 results/nr/linear-dafny-encoding/log-2023-12-04T13.43.25.2015735+00.00-concurrency-node-replication-NRSimple.i.dfy.smt2 + 33915255 results/nr/linear-dafny-encoding/log-2023-12-04T13.43.32.8322910+00.00-concurrency-node-replication-Linearize.i.dfy.smt2 + 2162742141 total diff --git a/macro-stats/linear-dafny.sh b/macro-stats/linear-dafny.sh new file mode 100644 index 0000000..8258b1e --- /dev/null +++ b/macro-stats/linear-dafny.sh @@ -0,0 +1,45 @@ +#! /bin/bash + +DAFNY_THREADS=8 + +VERIFY_NR=1 + +set -e +set -x + +. lib.sh + +if [ $# -lt 1 ]; then + echo "usage: $0 " + exit 1 +fi + +RESULTS_DIR=$1 + +print_header "verifying" + +cd $RESULTS_DIR + +echo $DAFNY_THREADS > linear-dafny-num-threads.txt + +if [ $VERIFY_NR -eq 1 ]; then + print_step "verifying nr" + + mkdir -p nr && cd nr + RESULTS_DIR=$(pwd) + + cd /root/ironsync + rm -R build/* || true + python3 /root/eval/time.py \ + /root/eval/results/nr/linear-dafny-verification-parallel.output.txt \ + /root/eval/results/nr/linear-dafny-verification-parallel.time.txt \ + make -j$DAFNY_THREADS build/concurrency/node-replication/Interface.i.verified + rm -R build/* || true + python3 /root/eval/time.py \ + /root/eval/results/nr/linear-dafny-verification-singlethread.output.txt \ + /root/eval/results/nr/linear-dafny-verification-singlethread.time.txt \ + make -j1 build/concurrency/node-replication/Interface.i.verified + + cd $RESULTS_DIR + cd .. # results +fi diff --git a/macro-stats/monster-table-generator.py b/macro-stats/monster-table-generator.py new file mode 100755 index 0000000..45a73f2 --- /dev/null +++ b/macro-stats/monster-table-generator.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 +import os + +table = r"""\begin{tabular}{l|rrr|r|rr|r} + \multirow{2}{1.6cm}{\vfill{}System \\ $\rightarrow$ Verifier} + & \multicolumn{3}{c|}{Line Count} + & \multirow{2}{.2cm}{\tablerot{P/C Ratio}} + & \multicolumn{2}{c|}{Time (s)} + & \multirow{2}{.2cm}{\tablerot{SMT (MB)}} \\ + + & \tablerot{trusted} + & \tablerot{proof} + & \tablerot{code} + & + & \tablerot{1 core} + & \tablerot{\evalParallelNumThreads\xspace cores} + & \\ \hline +""" + +HLINE = "hline" +class Row: + def __init__(self, label, time1, time8, trusted, proof, exec, pcratio, smt): + self.label = label + self.time1 = time1 + self.time8 = time8 + self.trusted = trusted + self.proof = proof + self.exec = exec + self.pcratio = pcratio + self.smt = smt + +# ha ha ha we can't compute the pcratio because we don't have the data! 
only symbols +# def compute_pcratio(self): +# if self.proof == "": +# return "" +# return ".1f" % (float(self.proof)/float(self.exec)) + + +def empty(label): + return Row(label, " ", " ", " ", " ", " ", " ", " ") + +rows = [ + empty("IronKV"), + Row(r"$\rightarrow$ \name", + r"\evalVerusIronshtSinglethreadWallTime", + r"\evalVerusIronshtParallelWallTime", + r"\evalVerusIronshtLineCountTrusted", + r"\evalVerusIronshtLineCountProof", + r"\evalVerusIronshtLineCountExec", + r"\evalVerusIronshtLineCountProofCodeRatio", + r"\evalVerusIronshtEncodingSizeMB", + ), + Row(r"$\rightarrow$ Dafny", + r"\evalDafnyIronshtSinglethreadWallTime", + r"\evalDafnyIronshtParallelWallTime", + r"\evalDafnyIronshtSpecLines", + r"\evalDafnyIronshtProofLines", + r"\evalDafnyIronshtImplLines", + r"\evalDafnyIronshtProofCodeRatio", + r"\ironKVDafnySMTMB", + ), + HLINE, + empty("NR"), + Row(r"$\rightarrow$ \name", + r"\evalVerusNrSinglethreadWallTime", + r"\evalVerusNrParallelWallTime", + r"\evalVerusNrLineCountTrusted", + r"\evalVerusNrLineCountProof", + r"\evalVerusNrLineCountExec", + r"\evalVerusNrLineCountProofCodeRatio", + r"\evalVerusNrEncodingSizeMB", + ), + Row(r"$\rightarrow$ L.Dafny", + r"\evalLinearDafnyNrSinglethreadWallTime", + r"\evalLinearDafnyNrParallelWallTime", + r"\evalLinearDafnyNrLineCountTrusted", + r"\evalLinearDafnyNrLineCountProof", + r"\evalLinearDafnyNrLineCountExec", + r"\evalLinearDafnyNrLineCountProofCodeRatio", + r"\nrDafnySMTMB", + ), + HLINE, + HLINE, + Row("Page table", + r"\evalVerusPageTableSinglethreadWallTime", + r"\evalVerusPageTableParallelWallTime", + r"\evalVerusPageTableLineCountTrustedAdjusted", + r"\evalVerusPageTableLineCountProofAdjusted", + r"\evalVerusPageTableLineCountExecAdjusted", + r"\evalVerusPageTableLineCountProofCodeRatioAdjusted", + r"\evalVerusPageTableEncodingSizeMB", + ), + HLINE, + Row("Mimalloc", + r"\evalVerusMimallocSinglethreadWallTime", + r"\evalVerusMimallocParallelWallTime", + r"\allocatorTotalTrusted", + r"\allocatorProof", + r"\allocatorExec", + r"\allocatorProofToCodeRatio", + r"\evalVerusMimallocEncodingSizeMB", + ), + HLINE, + Row("P. 
log", + r"\evalVerusPmemlogSinglethreadWallTime", + r"\evalVerusPmemlogParallelWallTime", + r"\evalVerusPmemlogLineCountTrusted", + r"\evalVerusPmemlogLineCountProof", + r"\evalVerusPmemlogLineCountExec", + r"\evalVerusPmemlogLineCountProofCodeRatio", + r"\evalVerusPmemlogEncodingSizeMB", + ), + HLINE, + HLINE, + Row("Verus total", + r"", + r"", + r"\totalVerusLinesTrusted", + r"\totalVerusLinesProof", + r"\totalVerusLinesExec", + r"\totalVerusLinesProofCodeRatio", + r"", + ), + ] + +def strip_right(cols): + if len(cols)==0: return cols + if cols[-1] != "": return cols + return strip_right(cols[:-1]) + +# % computed with ../eval/totals_data.py +for row in rows: + if row == HLINE: + table += r" \hline" + continue + cols = [ + row.label, + row.trusted, + row.proof, + row.exec, + row.pcratio, + row.time1, + row.time8, + row.smt, + ] + cols = strip_right(cols) + table += " " + (" & ".join(cols)) + r" \\" + table += "\n" + +#table += r""" \multicolumn{3}{l|}{XXX Verus total} & 3852 & 30437 & 5757 & 5.3 & +table += r""" +\end{tabular} +""" + +script_path = os.path.dirname(__file__) +out_path = os.path.join(script_path, "../paper/plots/monster-table.tex") +open(out_path, "w").write(table) + + diff --git a/macro-stats/run.sh b/macro-stats/run.sh new file mode 100644 index 0000000..f51a377 --- /dev/null +++ b/macro-stats/run.sh @@ -0,0 +1,24 @@ + +print_top_header() { + local message=$1 + echo -e "\033[35m■■■ $message ■■■\033[0m" +} + +mkdir results + +print_top_header "collecting results for verus" + +docker run --platform=linux/amd64 --rm -it -v .:/root/eval -w /root/eval ghcr.io/utaal/ubuntu-essentials-rust-1.76.0 /bin/bash util/entry-verus.sh + +print_top_header "collecting results for dafny" + +docker run --platform=linux/amd64 --rm -it -v .:/root/eval -w /root/eval ghcr.io/utaal/ubuntu-essentials-rust-1.76.0 /bin/bash util/entry-dafny.sh + +print_top_header "collecting results for linear-dafny" + +docker run --platform=linux/amd64 --rm -it -v .:/root/eval -w /root/eval ghcr.io/utaal/ironsync-osdi2023-artifact /bin/bash util/entry-linear-dafny-early.sh +docker run --platform=linux/amd64 --rm -it -v .:/root/eval -v ./repos/ironsync/build:/root/ironsync/build -w /root/eval ghcr.io/utaal/ironsync-osdi2023-artifact /bin/bash util/entry-linear-dafny.sh + +print_top_header "summarizing" + +docker run --platform=linux/amd64 --rm -it -v .:/root/eval -w /root/eval ghcr.io/utaal/ubuntu-essentials-rust-1.76.0 /bin/bash util/entry-summarize.sh diff --git a/macro-stats/summarize.sh b/macro-stats/summarize.sh new file mode 100644 index 0000000..4b2b7bd --- /dev/null +++ b/macro-stats/summarize.sh @@ -0,0 +1,21 @@ +#! /bin/bash + +VERUS_NUM_THREADS=8 + +set -e +set -x + +. lib.sh + +if [ $# -lt 1 ]; then + echo "usage: $0 " + exit 1 +fi + +RESULTS_DIR=$1 +VERUS_ENCODING_TAR=$2 + +print_header "summarizing" +(cd summarize; + cargo build --release; + ./target/release/summarize ../$RESULTS_DIR ../$VERUS_ENCODING_TAR) \ No newline at end of file diff --git a/macro-stats/summarize/.gitignore b/macro-stats/summarize/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/macro-stats/summarize/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/macro-stats/summarize/Cargo.lock b/macro-stats/summarize/Cargo.lock new file mode 100644 index 0000000..86a66a0 --- /dev/null +++ b/macro-stats/summarize/Cargo.lock @@ -0,0 +1,303 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "libc" +version = "0.2.150" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "proc-macro2" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "rustix" +version = "0.38.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9470c4bf8246c8daf25f9598dca807fb6510347b1e1cfa55749113850c79d88a" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "serde" +version = "1.0.192" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.192" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "summarize" +version = "0.1.0" +dependencies = [ + "serde", + "serde_json", + "tempfile", +] + +[[package]] +name = "syn" +version = "2.0.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tempfile" +version = "3.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/macro-stats/summarize/Cargo.toml b/macro-stats/summarize/Cargo.toml new file mode 100644 index 0000000..1dcbca2 --- /dev/null +++ b/macro-stats/summarize/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "summarize" +version = "0.1.0" +edition = "2021" + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +tempfile = "3.8.1" diff --git a/macro-stats/summarize/encoding_bytes.sh b/macro-stats/summarize/encoding_bytes.sh new file mode 100644 index 0000000..9b9fd7e --- /dev/null +++ b/macro-stats/summarize/encoding_bytes.sh @@ -0,0 +1,11 @@ +set -e + +tmpdir_path=$1 +dir_name=$2 + +cd $tmpdir_path/$dir_name + +for project in ironsht nr verified-storage mimalloc page-table; do + byte_count=`wc -c $project/verus-encoding/* | grep ' total$' | sed 's/ total$//g'` + echo $project $byte_count +done diff --git a/macro-stats/summarize/src/main.rs b/macro-stats/summarize/src/main.rs new file mode 100644 index 0000000..dfb39a0 --- /dev/null +++ b/macro-stats/summarize/src/main.rs @@ -0,0 +1,412 @@ +use std::{path::{PathBuf, Path}, collections::HashMap}; + +fn warn_p(project_id: &str, msg: &str) { + eprintln!("warning: [{}] {}", project_id, msg); +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct VerificationResults { + encountered_vir_error: bool, + success: bool, + verified: u32, + errors: u32, + is_verifying_entire_crate: bool, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct VerificationTimesMs { + estimated_cpu_time: u64, + total: u64, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct VerificationOutput { + 
verification_results: VerificationResults, + times_ms: VerificationTimesMs, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct LineCountEntry { + #[serde(default)] + definitions: u64, + #[serde(default)] + proof: u64, + #[serde(default)] + layout: u64, + #[serde(default)] + comment: u64, + #[serde(default)] + trusted: u64, + #[serde(default)] + exec: u64, + #[serde(default)] + spec: u64, + #[serde(default)] + directives: u64, + #[serde(rename = "proof,exec")] #[serde(default)] + proof_exec: u64, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct LineCountOutput { + total: LineCountEntry, +} + +fn parse_verification_output(path: &Path) -> VerificationOutput { + let contents = std::fs::read_to_string(path).unwrap(); + serde_json::from_str(&contents).unwrap_or_else(|err| panic!("cannot parse {}: {}", path.display(), err)) +} + +fn parse_line_count_output(path: &Path) -> Option { + let contents = std::fs::read_to_string(path).ok()?; + Some(serde_json::from_str(&contents).unwrap()) +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct ModeSummaryVerus { + wall_time_verus_s: f64, + wall_time_s: f64, + estimated_cpu_time_verus_s: f64, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct ModeSummaryDafny { + wall_time_s: f64, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct ProjectSummaryVerus { + project_id: String, + success: bool, + singlethread: ModeSummaryVerus, + parallel: ModeSummaryVerus, + linecount: Option, + encoding_size_mb: Option, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct ProjectSummaryDafny { + project_id: String, + dafny_name: String, + singlethread: ModeSummaryDafny, + parallel: ModeSummaryDafny, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct LineCountSummary { + trusted: u64, + proof: u64, + exec: u64, + both_proof_exec: u64, + proof_exec_ratio: f32, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "kebab-case")] +struct ProjectSummary { + project_id: String, + verus: ProjectSummaryVerus, + dafny_baseline: Option, +} + +fn process_verus_project_time(project_id: &str, project_path: &Path, parallel: bool) -> (ModeSummaryVerus, bool) { + let output = parse_verification_output(&project_path.join(format!("verus-verification-{}.json", if parallel { "parallel" } else { "singlethread" }))); + assert!(output.verification_results.is_verifying_entire_crate); + if !output.verification_results.success { + warn_p(project_id, "verification failed"); + } + let time_contents = std::fs::read_to_string(project_path.join(format!("verus-verification-{}.time.txt", if parallel { "parallel" } else { "singlethread" }))).unwrap(); + let time_s = time_contents.parse::().unwrap(); + + (ModeSummaryVerus { + wall_time_verus_s: output.times_ms.total as f64 / 1000.0, + estimated_cpu_time_verus_s: output.times_ms.estimated_cpu_time as f64 / 1000.0, + wall_time_s: time_s, + }, output.verification_results.success) +} + +fn process_dafny_project_time(project_id: &str, project_path: &Path, dafny_name: &str, parallel: bool) -> ModeSummaryDafny { + let time_contents = std::fs::read_to_string(project_path.join(format!("{}-verification-{}.time.txt", dafny_name, if parallel { "parallel" } else { 
"singlethread" }))).unwrap(); + let mut time_s = time_contents.parse::().unwrap(); + + if project_id == "ironsht" { + let time_nonlinear_contents = std::fs::read_to_string(project_path.join(format!("{}-verification-{}-nonlinear.time.txt", dafny_name, if parallel { "parallel" } else { "singlethread" }))).unwrap(); + time_s += time_nonlinear_contents.parse::().unwrap(); + } + + ModeSummaryDafny { wall_time_s: time_s } +} + +fn process_verus_project_line_count(_project_id: &str, project_path: &Path) -> Option { + let output = parse_line_count_output(&project_path.join("verus-linecount.json"))?; + + let proof = output.total.proof + output.total.spec + output.total.proof_exec; + let exec = output.total.exec + output.total.proof_exec; + + Some(LineCountSummary { + trusted: output.total.trusted, + proof, + exec, + both_proof_exec: output.total.proof_exec, + proof_exec_ratio: proof as f32 / exec as f32, + }) +} + +fn process_verus_project(project_id: &str, project_path: &Path) -> ProjectSummaryVerus { + let (singlethread, singlethread_success) = process_verus_project_time(project_id, project_path, false); + let (parallel, parallel_success) = process_verus_project_time(project_id, project_path, true); + ProjectSummaryVerus { + project_id: project_id.to_owned(), + singlethread, + parallel, + linecount: process_verus_project_line_count(project_id, project_path), + success: singlethread_success && parallel_success, + encoding_size_mb: None, + } +} + +fn process_dafny_project(project_id: &str, project_path: &Path, dafny_name: &str) -> ProjectSummaryDafny { + let singlethread = process_dafny_project_time(project_id, project_path, dafny_name, false); + let parallel = process_dafny_project_time(project_id, project_path, dafny_name, true); + ProjectSummaryDafny { + project_id: project_id.to_owned(), + dafny_name: dafny_name.to_owned(), + singlethread, + parallel, + } +} + +const PROJECTS: &[(&str, Option<&str>)] = &[ + ("ironsht", Some("dafny")), + ("nr", Some("linear-dafny")), + ("page-table", None), + ("mimalloc", None), + ("verified-storage", None), +]; + +fn main() { + let mut args = std::env::args(); + args.next().unwrap(); + + let results = PathBuf::from(args.next().unwrap()); + let encodings_tar = args.next(); + + let json_out_file = results.join("results.json"); + let latex_commands_out_file = results.join("results-latex-commands.tex"); + let latex_table_out_file = results.join("results-latex-table.tex"); + + let num_threads = { + let verus_num_threads = std::fs::read_to_string(results.join("verus-num-threads.txt")).unwrap().trim().parse::().unwrap(); + for (_, dafny_baseline) in PROJECTS.iter() { + if let Some(dafny_name) = dafny_baseline { + let dafny_num_threads = std::fs::read_to_string(results.join(format!("{}-num-threads.txt", dafny_name))).unwrap().trim().parse::().unwrap(); + assert_eq!(dafny_num_threads, verus_num_threads); + } + } + verus_num_threads + }; + + let project_verus_encodings_mbs = encodings_tar.map(|encodings_tar| { + let mut project_verus_encoding_mbs = HashMap::new(); + + let temp_dir = tempfile::tempdir().expect("Failed to create temporary directory"); + use std::process::Command; + + let output = Command::new("tar") + .arg("-xf") + .arg(encodings_tar) + .arg("-C") + .arg(temp_dir.path()) + .output() + .expect("Failed to execute tar command"); + + if !output.status.success() { + panic!("Failed to extract tar.gz file: {}", String::from_utf8_lossy(&output.stderr)); + } + + let dir_name = { + let mut paths = std::fs::read_dir(temp_dir.path()).expect("the tar output is 
unexpected"); + let dir_name = paths.next().expect("one directory in the tar file") + .expect("valid fs call"); + assert!(paths.next().is_none()); + dir_name.file_name() + }; + + // let _ = std::io::Read::read(&mut std::io::stdin(), &mut [0u8]).unwrap(); + + let output = Command::new("bash") + .arg("encoding_bytes.sh") + .arg(temp_dir.path()) + .arg(dir_name) + .output() + .expect("Failed to execute count command"); + if !output.status.success() { + panic!("Failed to execute count command: {}", String::from_utf8_lossy(&output.stderr)); + } + + let out = String::from_utf8_lossy(&output.stdout); + + for l in out.lines() { + let mut s = l.split(" "); + let prj = s.next().expect("project name").to_owned(); + let count = (s.next().expect("byte count").parse::().expect("valid byte count") as f64 / 1_000_000.0).round() as u64; + project_verus_encoding_mbs.insert(prj, count); + } + project_verus_encoding_mbs + }); + + let summaries = PROJECTS.iter().map(|(project, dafny_baseline)| { + let mut s = ProjectSummary { + project_id: (*project).to_owned(), + verus: process_verus_project(project, &results.join(project)), + dafny_baseline: dafny_baseline.map(|dafny_name| process_dafny_project(project, &results.join(project), dafny_name)), + }; + if let Some(project_verus_encodings_mbs) = &project_verus_encodings_mbs { + s.verus.encoding_size_mb = Some(project_verus_encodings_mbs[*project]); + } + s + }).collect::>(); + + { + let mut json_out = std::fs::File::create(json_out_file).unwrap(); + serde_json::to_writer_pretty(&mut json_out, &summaries).unwrap(); + } + + fn project_id_name(project_id: &str) -> String { + project_id.split('-').map(|s| s.chars().next().unwrap().to_uppercase().chain(s.chars().skip(1)).collect::()).collect::() + } + + #[cfg(old)] + { + fn emit_commands_verus(summary: &ProjectSummaryVerus, latex_commands_out: &mut std::fs::File, project_id_name: &str) { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}Success}}{{{}}}", project_id_name, summary.success).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}SinglethreadWallTime}}{{{:.2}}}", project_id_name, summary.singlethread.wall_time_s).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}ParallelWallTime}}{{{:.2}}}", project_id_name, summary.parallel.wall_time_s).unwrap(); + if let Some(linecount) = &summary.linecount { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountTrusted}}{{{}}}", project_id_name, linecount.trusted).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProof}}{{{}}}", project_id_name, linecount.proof).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountExec}}{{{}}}", project_id_name, linecount.exec).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProofCodeRatio}}{{{:.2}}}", project_id_name, linecount.proof_exec_ratio).unwrap(); + } else { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountTrusted}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProof}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountExec}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProofCodeRatio}}{{TODO}}", project_id_name).unwrap(); + } + } + + fn emit_commands_dafny_baseline(summary: &ProjectSummaryDafny, latex_commands_out: &mut std::fs::File, dafny_name: &str, project_id_name: &str) { + 
writeln!(latex_commands_out, "\\newcommand{{\\eval{}{}SinglethreadWallTime}}{{{:.2}}}", dafny_name, project_id_name, summary.singlethread.wall_time_s).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\eval{}{}ParallelWallTime}}{{{:.2}}}", dafny_name, project_id_name, summary.parallel.wall_time_s).unwrap(); + } + + let mut latex_commands_out = std::fs::File::create(latex_commands_out_file).unwrap(); + use std::io::Write; + writeln!(latex_commands_out, "\\newcommand{{\\evalVerusProjectCount}}{{{}}}", summaries.len()).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalParallelNumThreads}}{{{}}}", num_threads).unwrap(); + for summary in summaries.iter() { + let project_id_name = project_id_name(&summary.project_id); + + emit_commands_verus(&summary.verus, &mut latex_commands_out, &project_id_name); + + if let Some(dafny_baseline) = &summary.dafny_baseline { + // dafny name from "linear-dafny" to "LinearDafny" + let dafny_name = dafny_baseline.dafny_name.split('-').map(|s| s.chars().next().unwrap().to_uppercase().chain(s.chars().skip(1)).collect::()).collect::(); + emit_commands_dafny_baseline(dafny_baseline, &mut latex_commands_out, &dafny_name, &project_id_name); + } + } + } + + { + fn emit_commands_verus(summary: &ProjectSummaryVerus, latex_commands_out: &mut std::fs::File, project_id_name: &str) { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}Success}}{{{}}}", project_id_name, summary.success).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}SinglethreadWallTime}}{{{:.0}}}", project_id_name, summary.singlethread.wall_time_s).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}ParallelWallTime}}{{{:.0}}}", project_id_name, summary.parallel.wall_time_s).unwrap(); + if let Some(linecount) = &summary.linecount { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountTrusted}}{{{}}}", project_id_name, linecount.trusted).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProof}}{{{}}}", project_id_name, linecount.proof).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountExec}}{{{}}}", project_id_name, linecount.exec).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProofCodeRatio}}{{{:.1}}}", project_id_name, linecount.proof_exec_ratio).unwrap(); + } else { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountTrusted}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProof}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountExec}}{{TODO}}", project_id_name).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}LineCountProofCodeRatio}}{{TODO}}", project_id_name).unwrap(); + } + if let Some(encoding_mb) = &summary.encoding_size_mb { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}EncodingSizeMB}}{{{}}}", project_id_name, encoding_mb).unwrap(); + } else { + writeln!(latex_commands_out, "\\newcommand{{\\evalVerus{}EncodingSizeMB}}{{TODO}}", project_id_name).unwrap(); + } + } + + fn emit_commands_dafny_baseline(summary: &ProjectSummaryDafny, latex_commands_out: &mut std::fs::File, dafny_name: &str, project_id_name: &str) { + writeln!(latex_commands_out, "\\newcommand{{\\eval{}{}SinglethreadWallTime}}{{{:.0}}}", dafny_name, project_id_name, summary.singlethread.wall_time_s).unwrap(); + writeln!(latex_commands_out, "\\newcommand{{\\eval{}{}ParallelWallTime}}{{{:.0}}}", dafny_name, 
+        }
+
+        let mut latex_commands_out = std::fs::File::create(latex_commands_out_file).unwrap();
+        use std::io::Write;
+        writeln!(latex_commands_out, "\\newcommand{{\\evalVerusProjectCount}}{{{}}}", summaries.len()).unwrap();
+        writeln!(latex_commands_out, "\\newcommand{{\\evalParallelNumThreads}}{{{}}}", num_threads).unwrap();
+        for summary in summaries.iter() {
+            let project_id_name = project_id_name(&summary.project_id);
+
+            emit_commands_verus(&summary.verus, &mut latex_commands_out, &project_id_name);
+
+            if let Some(dafny_baseline) = &summary.dafny_baseline {
+                // dafny name from "linear-dafny" to "LinearDafny"
+                let dafny_name = dafny_baseline.dafny_name.split('-').map(|s| s.chars().next().unwrap().to_uppercase().chain(s.chars().skip(1)).collect::<String>()).collect::<String>();
+                emit_commands_dafny_baseline(dafny_baseline, &mut latex_commands_out, &dafny_name, &project_id_name);
+            }
+        }
+
+        // struct Totals { proof: u64, exec: u64, both_proof_exec: u64 }
+        // let mut t = Totals { proof: 0, exec: 0, both_proof_exec: 0 };
+        // for summary in summaries.iter() {
+        //     let Some(linecount) = &summary.verus.linecount else { panic!("missing line count") };
+        //     dbg!(&summary);
+        //     t.proof += linecount.proof;
+        //     t.exec += linecount.exec;
+        //     t.both_proof_exec += linecount.both_proof_exec;
+        // }
+        // writeln!(latex_commands_out, "\\newcommand{{\\evalVerusTotalProofLines}}{{{:.1}K}}", t.proof as f64 / 1000.0).unwrap();
+        // writeln!(latex_commands_out, "\\newcommand{{\\evalVerusTotalExecLines}}{{{:.1}K}}", t.exec as f64 / 1000.0).unwrap();
+        // writeln!(latex_commands_out, "\\newcommand{{\\evalVerusTotalProofExecLines}}{{{:.1}}}", t.both_proof_exec).unwrap();
+    }
+
+    {
+        let mut latex_table_out = std::fs::File::create(latex_table_out_file).unwrap();
+        use std::io::Write;
+
+        writeln!(latex_table_out, "\\documentclass{{article}}").unwrap();
+
+        writeln!(latex_table_out, "\\input{{results-latex-commands}}").unwrap();
+
+        writeln!(latex_table_out, "\\begin{{document}}").unwrap();
+
+        writeln!(latex_table_out, "\\begin{{tabular}}{{l|c|c|c|c|c|c}}").unwrap();
+
+        writeln!(latex_table_out, "Project & \\multicolumn{{2}}{{c|}}{{Time (s)}} & \\multicolumn{{3}}{{c|}}{{Line Count}} \\\\").unwrap();
+        writeln!(latex_table_out, " & 1 thread & \\evalParallelNumThreads threads & trusted & proof & exec \\\\").unwrap();
+        writeln!(latex_table_out, "\\hline").unwrap();
+        for summary in summaries.iter() {
+            let project_id_name = project_id_name(&summary.project_id);
+            writeln!(latex_table_out, "{} & \\evalVerus{}SinglethreadWallTime & \\evalVerus{}ParallelWallTime & \\evalVerus{}LineCountTrusted & \\evalVerus{}LineCountProof & \\evalVerus{}LineCountExec \\\\", project_id_name, project_id_name, project_id_name, project_id_name, project_id_name, project_id_name).unwrap();
+        }
+        writeln!(latex_table_out, "\\end{{tabular}}").unwrap();
+
+        writeln!(latex_table_out, "\\end{{document}}").unwrap();
+    }
+}
+
diff --git a/macro-stats/time.py b/macro-stats/time.py
new file mode 100644
index 0000000..dab3644
--- /dev/null
+++ b/macro-stats/time.py
@@ -0,0 +1,18 @@
+import sys
+import subprocess
+import time
+
+output_file = sys.argv[1]
+time_file = sys.argv[2]
+command = sys.argv[3:]
+
+start_time = time.time()
+
+with open(output_file, 'w') as f:
+    process = subprocess.run(command, stdout=f)
+
+end_time = time.time()
+execution_time = end_time - start_time
+
+with open(time_file, 'w') as f:
+    f.write(str(execution_time))
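Note: time.py above is a minimal timing wrapper; it redirects the wrapped command's stdout to the first argument and writes the elapsed wall-clock seconds (as a bare float) to the second. A standalone invocation outside the harness, with `sleep 2` as a stand-in workload, would look like:

    python3 time.py sleep.output.txt sleep.time.txt sleep 2
    cat sleep.time.txt    # prints a value close to 2.0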
diff --git a/macro-stats/util/entry-dafny.sh b/macro-stats/util/entry-dafny.sh
new file mode 100644
index 0000000..1a530c6
--- /dev/null
+++ b/macro-stats/util/entry-dafny.sh
@@ -0,0 +1,8 @@
+
+# apt-get update && \
+# apt-get install -y curl git build-essential unzip wget gcc-multilib python3 libicu-dev libgomp1 && \
+# curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
+
+source "$HOME/.cargo/env"
+
+bash dafny.sh results
diff --git a/macro-stats/util/entry-linear-dafny-early.sh b/macro-stats/util/entry-linear-dafny-early.sh
new file mode 100644
index 0000000..599b208
--- /dev/null
+++ b/macro-stats/util/entry-linear-dafny-early.sh
@@ -0,0 +1,2 @@
+
+bash linear-dafny-early.sh results
diff --git a/macro-stats/util/entry-linear-dafny.sh b/macro-stats/util/entry-linear-dafny.sh
new file mode 100644
index 0000000..2080a4a
--- /dev/null
+++ b/macro-stats/util/entry-linear-dafny.sh
@@ -0,0 +1,2 @@
+
+bash linear-dafny.sh results
diff --git a/macro-stats/util/entry-summarize.sh b/macro-stats/util/entry-summarize.sh
new file mode 100644
index 0000000..888ab4b
--- /dev/null
+++ b/macro-stats/util/entry-summarize.sh
@@ -0,0 +1,8 @@
+
+# apt-get update && \
+# apt-get install -y curl git build-essential wget python3 && \
+# curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
+
+source "$HOME/.cargo/env"
+
+bash summarize.sh results results-m5.8xlarge-sosp-verus-encoding-only-no-arithmetic-lib.tar.gz
diff --git a/macro-stats/util/entry-verus-all-triggers.sh b/macro-stats/util/entry-verus-all-triggers.sh
new file mode 100644
index 0000000..9117011
--- /dev/null
+++ b/macro-stats/util/entry-verus-all-triggers.sh
@@ -0,0 +1,13 @@
+
+# apt-get update && \
+# apt-get install -y curl git build-essential unzip wget gcc-multilib python3 && \
+# curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
+#
+
+apt-get update && \
+    apt-get install -y singular \
+    libpmem1 libpmemlog1 libpmem-dev libpmemlog-dev llvm-dev clang libclang-dev # verified-storage
+
+source "$HOME/.cargo/env"
+
+bash verus-all-triggers.sh results
diff --git a/macro-stats/util/entry-verus.sh b/macro-stats/util/entry-verus.sh
new file mode 100644
index 0000000..6de2b53
--- /dev/null
+++ b/macro-stats/util/entry-verus.sh
@@ -0,0 +1,13 @@
+
+# apt-get update && \
+# apt-get install -y curl git build-essential unzip wget gcc-multilib python3 && \
+# curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y
+#
+
+apt-get update && \
+    apt-get install -y singular \
+    libpmem1 libpmemlog1 libpmem-dev libpmemlog-dev llvm-dev clang libclang-dev # verified-storage
+
+source "$HOME/.cargo/env"
+
+bash verus.sh results
diff --git a/macro-stats/util/entry.dockerfile b/macro-stats/util/entry.dockerfile
new file mode 100644
index 0000000..5b27eed
--- /dev/null
+++ b/macro-stats/util/entry.dockerfile
@@ -0,0 +1,6 @@
+FROM --platform=linux/amd64 ubuntu
+
+RUN apt-get update && \
+    apt-get install -y curl git build-essential unzip wget gcc-multilib python3 libicu-dev libgomp1 && \
+    curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --default-toolchain none -y && \
+    bash -c "source $HOME/.cargo/env && rustup install 1.73.0"
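Note: the entry-*.sh scripts above are meant to run inside a container built from entry.dockerfile; run.sh (earlier in this patch) is the intended driver. To exercise a single step by hand, something along these lines should work — the image tag and bind mount here are illustrative, not necessarily what run.sh uses:

    docker build -f util/entry.dockerfile -t macro-stats-entry .
    docker run --rm -v "$PWD":/work -w /work macro-stats-entry bash util/entry-verus.sh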
diff --git a/macro-stats/verus.sh b/macro-stats/verus.sh
new file mode 100644
index 0000000..c5bfc97
--- /dev/null
+++ b/macro-stats/verus.sh
@@ -0,0 +1,147 @@
+#! /bin/bash
+
+export VERUS_SINGULAR_PATH="/usr/bin/Singular"
+
+VERUS_NUM_THREADS=8
+
+set -e
+set -x
+
+. lib.sh
+
+if [ $# -lt 1 ]; then
+    echo "usage: $0 <results-dir>"
+    exit 1
+fi
+
+RESULTS_DIR=$1
+
+print_header "setting up repos/"
+
+if [ ! -d "repos" ]; then
+    mkdir repos
+fi
+
+print_header "cloning or updating repositories"
+
+clone_and_update_repository "verus-main" "main" "https://github.com/verus-lang/verus.git"
+clone_and_update_repository "verus-main-line-count" "main" "https://github.com/verus-lang/verus.git"
+clone_and_update_repository "verified-node-replication" "main" "https://github.com/verus-lang/verified-node-replication.git"
+clone_and_update_repository "verified-ironkv" "main" "https://github.com/verus-lang/verified-ironkv.git"
+clone_and_update_repository "verified-nrkernel" "main" "https://github.com/utaal/verified-nrkernel.git"
+clone_and_update_repository "verified-storage" "line_count_annotations" "https://github.com/microsoft/verified-storage.git"
+clone_and_update_repository "verified-memory-allocator" "main" "https://github.com/verus-lang/verified-memory-allocator.git"
+
+print_header "getting z3"
+
+print_step "z3 for verus-main"
+
+(cd repos/verus-main/source;
+    if [ ! -f "z3" ]; then
+        bash ./tools/get-z3.sh
+    fi)
+
+print_header "building verus and line-count"
+
+print_step "building verus-main"
+(cd repos/verus-main/source; . ../tools/activate; vargo build --release --features singular)
+
+# print_step "building verus-nr"
+# (cd repos/verus-nr/source; . ../tools/activate; vargo build --release --features singular)
+
+print_step "building line_count"
+(cd repos/verus-main-line-count/source/tools/line_count; cargo build --release)
+
+# =====================================================================================================
+
+print_header "verifying"
+
+cd $RESULTS_DIR
+
+echo $VERUS_NUM_THREADS > verus-num-threads.txt
+
+run_verification() {
+    local result_dir=$1
+    print_step "verifying $result_dir"
+    local exe_path=$2
+    local num_threads=$3
+    local crate_path=$4
+    local extra_flags=${@:5}
+
+    mkdir -p $result_dir && cd $result_dir
+    # if [ -d "verus-encoding" ]; then
+    #     rm -R verus-encoding
+    # fi
+    # mkdir -p verus-encoding
+
+    python3 ../../time.py verus-verification-parallel.json verus-verification-parallel.time.txt \
+        $exe_path --emit=dep-info --time-expanded --no-report-long-running --output-json --num-threads=$num_threads $extra_flags \
+        $crate_path
+
+    python3 ../../time.py verus-verification-singlethread.json verus-verification-singlethread.time.txt \
+        $exe_path --time-expanded --no-report-long-running --output-json --num-threads=1 $extra_flags \
+        $crate_path
+
+    # cp .verus-log/*.smt* verus-encoding/.
+
+    # rm -R .verus-log
+
+    pwd
+
+    cd ..
+}
+
+count_lines() {
+    local result_dir=$1
+    print_step "counting lines for $result_dir"
+    local d_path=$2
+
+    cd $result_dir
+
+    ../../repos/verus-main-line-count/source/target/release/line_count --json $d_path > verus-linecount.json
+
+    rm $d_path
+
+    cd ..
+}
+
+VERUS_MAIN_EXE=../../repos/verus-main/source/target-verus/release/verus
+# VERUS_NR_EXE=../../repos/verus-nr/source/target-verus/release/verus
+
+run_verification page-table $VERUS_MAIN_EXE $VERUS_NUM_THREADS ../../repos/verified-nrkernel/page-table/main.rs --cfg feature=\"impl\" --rlimit 60
+count_lines page-table main.d
+
+run_verification ironsht $VERUS_MAIN_EXE $VERUS_NUM_THREADS ../../repos/verified-ironkv/ironsht/src/lib.rs --crate-type=lib
+count_lines ironsht lib.d
+
+run_verification nr $VERUS_MAIN_EXE $VERUS_NUM_THREADS ../../repos/verified-node-replication/verified-node-replication/src/lib.rs --crate-type=lib
+count_lines nr lib.d
+
+print_step "preparing verified-storage"
+(cd ../repos/verified-storage;
+    cd deps_hack;
+    cargo clean;
+    cargo +1.76.0 build;
+)
+
+run_verification verified-storage $VERUS_MAIN_EXE $VERUS_NUM_THREADS \
+    ../../repos/verified-storage/storage_node/src/lib.rs -L dependency=../../repos/verified-storage/deps_hack/target/debug/deps --extern=deps_hack=../../repos/verified-storage/deps_hack/target/debug/libdeps_hack.rlib
+count_lines verified-storage lib.d
+
+print_step "preparing mimalloc"
+(cd ../repos/verified-memory-allocator;
+    cd test_libc;
+    cargo clean;
+    cargo +1.76.0 build --release;
+    cd ..;
+    LIBC_RLIB_NAME=$(find ./test_libc/target/release/deps/ -name 'liblibc-*.rlib');
+    mkdir -p build;
+    cp $LIBC_RLIB_NAME build/liblibc.rlib;
+)
+
+run_verification mimalloc $VERUS_MAIN_EXE $VERUS_NUM_THREADS \
+    ../../repos/verified-memory-allocator/verus-mimalloc/lib.rs --extern libc=../../repos/verified-memory-allocator/build/liblibc.rlib --rlimit 240
+count_lines mimalloc lib.d
+
+
+cd .. # $RESULTS_DIR
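Note: the summarize step's LaTeX emitter (summarize/src/main.rs above) writes one \newcommand per metric into results-latex-commands.tex, which results-latex-table.tex then \input{}s. As an illustration, for a project id like "ironsht" (rendered Ironsht) paired with a baseline named "linear-dafny" (rendered LinearDafny), the generated commands take roughly this shape; the values shown here are placeholders, not measured results:

    \newcommand{\evalVerusProjectCount}{5}
    \newcommand{\evalParallelNumThreads}{8}
    \newcommand{\evalVerusIronshtSuccess}{true}
    \newcommand{\evalVerusIronshtSinglethreadWallTime}{900}
    \newcommand{\evalVerusIronshtParallelWallTime}{150}
    \newcommand{\evalVerusIronshtLineCountTrusted}{1000}
    \newcommand{\evalVerusIronshtLineCountProof}{5000}
    \newcommand{\evalVerusIronshtLineCountExec}{2500}
    \newcommand{\evalVerusIronshtLineCountProofCodeRatio}{2.0}
    \newcommand{\evalVerusIronshtEncodingSizeMB}{40}
    \newcommand{\evalLinearDafnyIronshtSinglethreadWallTime}{1200}
    \newcommand{\evalLinearDafnyIronshtParallelWallTime}{300}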