Link to shadow discussions instead of old shadow-plugin-tor repo #251

Workflow file for this run

# Syntax reference:
# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
# When changing name, also change `workflows` in
# `.github/workflows/cancel_prev_run_all_steps.yml` to match.
name: Run all steps
permissions: read-all
on:
  push:
    paths-ignore:
      - '**.md'
      - 'LICENSE'
  pull_request:
    types: [opened, synchronize]
env:
  DEBIAN_FRONTEND: noninteractive
  SHADOW_COMMIT: 7ba17fb598d930c1368a6af08bb701ac7d16051d
  TOR_REPO: https://git.torproject.org/tor.git
  TOR_BRANCH: release-0.4.7
  TOR_COMMIT: tor-0.4.7.13
  # Optimization - this must be older than $TOR_COMMIT, but ideally not much
  # older.
  TOR_SHALLOW_SINCE: '2023-01-01'
  TGEN_COMMIT: 3d7788bad362b4487d1145da93ab2fdb73c9b639
  ONIONTRACE_COMMIT: 9e0e83ceb0765bc915a2888bcc02e68a5a9b848f
  NETDATA_MONTH: '2020-11'
  NETDATA_LAST_DAY: '30'
  # Increment to invalidate caches
  CACHE_VERSION: 1
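  # Note: the commit values and CACHE_VERSION above are embedded in the
  # `actions/cache` keys below (e.g.
  # `shadow-${{ env.SHADOW_COMMIT }}-${{ env.CACHE_VERSION }}`), so changing
  # any of them invalidates the corresponding cache and forces a rebuild.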
jobs:
  run-all-steps:
    # Keep synchronized with `CONTAINER` below
    runs-on: ubuntu-22.04
    env:
      CC: gcc
      CXX: g++
      # Keep synchronized with `runs-on`, above
      CONTAINER: ubuntu:22.04
      BUILDTYPE: release
      RUSTPROFILE: minimal
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Update packages
        run: sudo apt-get update
      - name: Install tornettools dependencies
        run: sudo apt-get install -y
          python3
          python3-dev
          python3-pip
          libxml2
          libxml2-dev
          libxslt1.1
          libxslt1-dev
          libpng16-16
          libpng-dev
          libfreetype6
          libfreetype6-dev
          libblas-dev
          liblapack-dev
          stow
      - name: Build tornettools
        run: |
          pip3 install wheel
          pip3 install -r requirements.txt
          pip3 install -I .
      - name: Restore shadow build cache
        id: restore-shadow-build-cache
        uses: actions/cache@v2
        with:
          path: |
            ~/opt/shadow
          key: shadow-${{ env.SHADOW_COMMIT }}-${{ env.CACHE_VERSION }}
      - name: Checkout shadow
        uses: actions/checkout@v2
        with:
          repository: shadow/shadow
          ref: ${{ env.SHADOW_COMMIT }}
          path: shadow
      - name: Install shadow deps
        run: |
          cd shadow
          sudo --preserve-env ci/container_scripts/install_deps.sh
          sudo --preserve-env ci/container_scripts/install_extra_deps.sh
      - name: Build shadow
        if: steps.restore-shadow-build-cache.outputs.cache-hit != 'true'
        run: |
          cd shadow
          ./setup build -j`nproc` -p ~/opt/shadow
          ./setup install
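      # Shadow, tor, tgen, and oniontrace are each installed under their own
      # prefix in ~/opt so their builds can be cached independently; the
      # "Install custom deps" step below stows them into ~/.local.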
      - name: Install tor deps
        run: sudo apt-get install -y autoconf automake gcc libevent-dev libssl-dev make
      - name: Restore tor build cache
        id: restore-tor-build-cache
        uses: actions/cache@v2
        with:
          path: |
            ~/opt/tor
          key: tor-${{ env.TOR_COMMIT }}-${{ env.CACHE_VERSION }}
      - name: Build tor
        if: steps.restore-tor-build-cache.outputs.cache-hit != 'true'
        run: |
          git clone --shallow-since=$TOR_SHALLOW_SINCE -b $TOR_BRANCH $TOR_REPO tor
          cd tor
          git checkout $TOR_COMMIT
          ./autogen.sh
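          # The --disable-* flags below skip documentation, unit tests, and the
          # optional lzma/zstd compression backends to keep this build fast.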
          ./configure --disable-asciidoc --disable-unittests --disable-manpage --disable-html-manual --disable-lzma --disable-zstd --prefix=$HOME/opt/tor
          make -j$(nproc)
          make install
      - name: Checkout tgen
        uses: actions/checkout@v2
        with:
          repository: shadow/tgen
          ref: ${{ env.TGEN_COMMIT }}
          path: tgen
      - name: Build tgen deps
        run: sudo apt-get install -y cmake gcc libglib2.0-dev libigraph-dev make
      - name: Build tgen
        run: |
          cd tgen
          mkdir build
          cd build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HOME/opt/tgen
          make -j`nproc`
          make install
          pip3 install -r ../tools/requirements.txt
          pip3 install -I ../tools
      - name: Checkout oniontrace
        uses: actions/checkout@v2
        with:
          repository: shadow/oniontrace
          ref: ${{ env.ONIONTRACE_COMMIT }}
          path: oniontrace
      - name: Install oniontrace deps
        run: sudo apt-get install -y cmake gcc libglib2.0-0 libglib2.0-dev make
      - name: Build oniontrace
        run: |
          cd oniontrace
          mkdir build
          cd build
          cmake .. -DCMAKE_INSTALL_PREFIX=$HOME/opt/oniontrace
          make -j`nproc`
          make install
          pip3 install -r ../tools/requirements.txt
          pip3 install -I ../tools
      - name: Restore net data cache
        id: restore-net-data-cache
        uses: actions/cache@v2
        with:
          path: netdata
          key: netdata-${{ env.NETDATA_MONTH }}-${{ env.NETDATA_LAST_DAY }}-${{ env.CACHE_VERSION }}
      - name: Download net data
        if: steps.restore-net-data-cache.outputs.cache-hit != 'true'
        run: |
          mkdir -p netdata
          cd netdata
          wget -O - https://collector.torproject.org/archive/relay-descriptors/consensuses/consensuses-$NETDATA_MONTH.tar.xz | tar xJ
          wget -O - https://collector.torproject.org/archive/relay-descriptors/server-descriptors/server-descriptors-$NETDATA_MONTH.tar.xz | tar xJ
          wget https://metrics.torproject.org/userstats-relay-country.csv
          wget -O - https://collector.torproject.org/archive/onionperf/onionperf-$NETDATA_MONTH.tar.xz | tar xJ
          wget -O bandwidth-$NETDATA_MONTH.csv "https://metrics.torproject.org/bandwidth.csv?start=$NETDATA_MONTH-01&end=$NETDATA_MONTH-$NETDATA_LAST_DAY"
          # Drop most of the consensuses. This saves about 6 minutes in the `stage` step on GitHub's CI runner.
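          # (`tail -n +100` selects everything from the 100th file onward, so
          # only the first 99 consensus files found are kept.)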
          find consensuses-*/ -type f | tail -n +100 | xargs rm
      - name: Checkout tmodel
        uses: actions/checkout@v2
        with:
          repository: tmodel-ccs2018/tmodel-ccs2018.github.io
          path: tmodel-ccs2018.github.io
      - name: Install custom deps
        run: |
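          # GNU Stow symlinks each package tree from ~/opt into ~/.local, so
          # the tools installed above become available under ~/.local/bin.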
          stow -d ~/opt -t ~/.local shadow
          stow -d ~/opt -t ~/.local tor
          stow -d ~/opt -t ~/.local tgen
          stow -d ~/opt -t ~/.local oniontrace
      - name: Stage
        run: tornettools
          --seed 1
          stage
          netdata/consensuses-*
          netdata/server-descriptors-*
          netdata/userstats-relay-country.csv
          tmodel-ccs2018.github.io
          --onionperf_data_path netdata/onionperf-*
          --bandwidth_data_path netdata/bandwidth-*.csv
          --geoip_path $HOME/opt/tor/share/tor/geoip
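      # `stage` writes the relayinfo_staging_*.json, userinfo_staging_*.json,
      # and networkinfo_staging.gml files consumed by the `generate` step
      # below.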
      - name: Generate
        run: |
          # Needed for fake certificate timestamps
          sudo apt-get install -y faketime
          # Simulate a very small network to keep this test fast.
          NETWORK_SCALE=0.0001
          # Likewise use a relatively small number of perf nodes, but still
          # enough to get somewhat meaningful data.
          TORPERF_N=10
          # At this network size, 1.0 would only give us ~4 onion service
          # markov clients, and 1 perf client.
          ONION_SERVICE_USER_SCALE=5.0
          tornettools \
            --seed 1 \
            generate \
            relayinfo_staging_*.json \
            userinfo_staging_*.json \
            networkinfo_staging.gml \
            tmodel-ccs2018.github.io \
            --network_scale $NETWORK_SCALE \
            --torperf_num_exit $TORPERF_N \
            --torperf_num_onion_service $TORPERF_N \
            --onion_service_user_scale $ONION_SERVICE_USER_SCALE \
            --prefix tornet
      - name: Simulate
        run: |
          # Use a short simulation time
          sed -i 's/stop_time:.*/stop_time: "15m"/' tornet/shadow.config.yaml
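          # The --args string is passed through to shadow itself.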
          tornettools --seed 1 simulate --args "--parallelism=$(nproc) --seed=1 --template-directory=shadow.data.template --progress=true" tornet
      - name: Parse
        # We use a relatively low convergence time (-c) since we're only
        # simulating 15m.
        run: tornettools parse -c 300 tornet
      - name: Plot
        # Plot against saved artifacts from a previous run, for comparison.
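        # (The `-l golden current` labels presumably name the baseline and the
        # current run, in that order, in the plot legends.)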
        run: tornettools plot --tor_metrics_path tor_metrics_*.json --prefix pdfs -l golden current -- .github/workflows/run_all_steps_output tornet
      - name: Generate README
        run: |
          echo "Generated by run_all_steps workflow. To update, unpack the
          artifacts from a more recent run of this workflow into this
          directory." > tornet/README
          echo "env:" >> tornet/README
          for v in \
            GITHUB_REF \
            GITHUB_SHA \
            SHADOW_COMMIT \
            TOR_REPO \
            TOR_BRANCH \
            TOR_COMMIT \
            TGEN_COMMIT \
            ONIONTRACE_COMMIT \
            NETDATA_MONTH \
            NETDATA_LAST_DAY \
          ; do echo " $v: ${!v:-}" >> tornet/README; done
          echo "nproc: " $(nproc) >> tornet/README
          echo "free: " >> tornet/README
          free >> tornet/README
      - name: Archive
        run: tornettools archive tornet
      - name: Upload tornet
        uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: tornet
          path: tornet
          retention-days: 5
      # Use this artifact to update .github/workflows/parse_and_plot_input when
      # necessary - e.g. when the output of the tgen or oniontrace parsers
      # changes in some significant way.
      - name: Upload parse_and_plot input
        uses: actions/upload-artifact@v2
        with:
          name: parse_and_plot_input
          path: |
            tornet/README
            tornet/tornettools*.log*
            tornet/*.json.xz
      # Use this artifact to update .github/workflows/run_all_steps_output,
      # which provides the "golden" baseline for the comparison graphs in
      # future runs. Typically this is only needed when the output changes in
      # some significant way.
      - name: Upload plot data
        uses: actions/upload-artifact@v2
        with:
          name: run_all_steps_output
          path: |
            tornet/README
            tornet/tornet.plot.data/*.json
      # This artifact is for a human to judge whether the plots are
      # "reasonable", and whether significant changes from the previous output
      # are expected. Since the simulations are currently not deterministic,
      # the lines from the current run are not expected to be identical to the
      # lines from the previous run.
      - name: Upload plots
        uses: actions/upload-artifact@v2
        with:
          name: plots
          path: pdfs/*.pdf