Added benchmarking scripts for the different tuning algorithms
AbhinavBehal committed May 31, 2019
1 parent fc4ce16 commit 0defc4c
Showing 7 changed files with 94 additions and 5 deletions.
7 changes: 5 additions & 2 deletions .gitignore
@@ -1,4 +1,3 @@

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
@@ -187,4 +186,8 @@ fabric.properties
.idea/httpRequests

# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
.idea/caches/build_file_checksums.ser

# Benchmarking results
benchmarking/results
benchmarking/.temp
27 changes: 27 additions & 0 deletions benchmarking/asha.sh
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

current_folder="${BASH_SOURCE%/*}"
results_file="${current_folder}/results/asha.csv"
temp_file="${current_folder}/.temp"

mkdir -p "${current_folder}/results" && touch "${results_file}"

echo "Workers,Min_R,Max_R,Reduction_Factor,Early_Stopping_Rounds,Time,Score" > "${results_file}"

for i in 2 4 8 16 32 64 128 256 512 1024; do
    workers=50
    min_r=1
    max_r=${i}
    early_stopping_rounds=${i}
    reduction_factor=2

    params="{\"n_workers\": ${workers}, \"min_r\": ${min_r}, \"max_r\": ${max_r},\
    \"reduction_factor\": ${reduction_factor}, \"early_stopping_rounds\": ${early_stopping_rounds}, \"cv\": 3}"

    python "${current_folder}/../main.py" -a asha -p "${params}" > "${temp_file}"
    score=$(grep -oP "(?<=Best score: ).+" "${temp_file}")
    time=$(grep -oP "(?<=Took: )[\\d\\.]+" "${temp_file}")
    echo "${workers},${min_r},${max_r},${reduction_factor},${early_stopping_rounds},${time},${score}" >> "${results_file}"
done

rm -f "${temp_file}" &> /dev/null
17 changes: 17 additions & 0 deletions benchmarking/grid_search.sh
@@ -0,0 +1,17 @@
#!/usr/bin/env bash

current_folder="${BASH_SOURCE%/*}"
results_file="${current_folder}/results/grid_search.csv"
temp_file="${current_folder}/.temp"

mkdir -p "${current_folder}/results" && touch "${results_file}"

python "${current_folder}/../main.py" -a grid -p '{"cv": 3}' > "${temp_file}"

score=$(grep -oP "(?<=Best score: ).+" "${temp_file}")
time=$(grep -oP "(?<=Took: )[\\d\\.]+" "${temp_file}")

echo "Time,Score" > "${results_file}"
echo "${time},${score}" >> "${results_file}"

rm -f "${temp_file}" &> /dev/null
18 changes: 18 additions & 0 deletions benchmarking/random_search.sh
@@ -0,0 +1,18 @@
#!/usr/bin/env bash

current_folder="${BASH_SOURCE%/*}"
results_file="${current_folder}/results/random_search.csv"
temp_file="${current_folder}/.temp"

mkdir -p "${current_folder}/results" && touch "${results_file}"

echo "Iterations,Time,Score" > "${results_file}"

for i in 5 10 15 20 25 30 40 50; do
    python "${current_folder}/../main.py" -a random -p "{\"n_iter\": ${i}, \"cv\": 3}" > "${temp_file}"
    score=$(grep -oP "(?<=Best score: ).+" "${temp_file}")
    time=$(grep -oP "(?<=Took: )[\\d\\.]+" "${temp_file}")
    echo "${i},${time},${score}" >> "${results_file}"
done

rm -f "${temp_file}" &> /dev/null
24 changes: 24 additions & 0 deletions benchmarking/sha.sh
@@ -0,0 +1,24 @@
#!/usr/bin/env bash

current_folder="${BASH_SOURCE%/*}"
results_file="${current_folder}/results/sha.csv"
temp_file="${current_folder}/.temp"

mkdir -p "${current_folder}/results" && touch "${results_file}"

echo "Configs,Min_R,Max_R,Reduction_Factor,Time,Score" > "${results_file}"

for i in 2 4 8 16 32 64 128 256 512 1024; do
    min_r=1
    max_r=${i}
    reduction_factor=2
    params="{\"n_configs\": ${i}, \"min_r\": ${min_r}, \"max_r\": ${max_r},\
    \"reduction_factor\": ${reduction_factor}, \"cv\": 3}"

    python "${current_folder}/../main.py" -a sha -p "${params}" > "${temp_file}"
    score=$(grep -oP "(?<=Best score: ).+" "${temp_file}")
    time=$(grep -oP "(?<=Took: )[\\d\\.]+" "${temp_file}")
    echo "${i},${min_r},${max_r},${reduction_factor},${time},${score}" >> "${results_file}"
done

rm -f "${temp_file}" &> /dev/null
5 changes: 3 additions & 2 deletions tuning/asha.py
@@ -20,8 +20,9 @@ def __lt__(self, other):


def run(n_workers, min_r, max_r, reduction_factor, early_stopping_rounds, cv):
# 5 minute timeout for the lambda
boto_config = Config(read_timeout=60 * 5)
# 10 minute timeout for the lambda
time_out = 10 * 60
boto_config = Config(read_timeout=time_out)
client = boto3.client('lambda', config=boto_config)

rungs = []
1 change: 0 additions & 1 deletion tuning/util.py
@@ -20,7 +20,6 @@
'max_delta_step': [0, 2],
'subsample': [0.5, 1.0],
'colsample_bytree': [0.0, 0.5, 1.0],
'tree_method': ['auto']
}


