diff --git a/README.md b/README.md index 13cfc3f..e2c728b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Zennit -![Zennit-Logo](https://raw.githubusercontent.com/chr5tphr/zennit/d3934e974bb7c685fc929786d6fc653474fbbc98/share/img/zennit.png) +![Zennit-Logo](https://raw.githubusercontent.com/chr5tphr/zennit/master/share/img/zennit.png) Zennit (**Z**ennit **e**xplains **n**eural **n**etworks **i**n **t**orch) @@ -14,9 +14,9 @@ state. ## Install -To install directly using pip, use: +To install directly from PyPI using pip, use: ```shell -$ pip install 'git+git://github.com/chr5tphr/zennit' +$ pip install zennit ``` Alternatively, install from a manually cloned repository to try out the examples: @@ -54,28 +54,42 @@ applicable modules, e.g. for ResNet50, the forward function (attribute) of the Bottleneck modules is overwritten to handle the residual connection. ## Example -Prepare the data needed for the example (requires cURL and (magic-)file): +This example requires bash, cURL and (magic-)file. 
+ +Create a virtual environment, install Zennit and download the example scripts: +```shell +$ mkdir zennit-example +$ cd zennit-example +$ python -m venv .venv +$ .venv/bin/pip install zennit +$ curl -o feed_forward.py \ + 'https://raw.githubusercontent.com/chr5tphr/zennit/master/share/example/feed_forward.py' +$ curl -o download-lighthouses.sh \ + 'https://raw.githubusercontent.com/chr5tphr/zennit/master/share/scripts/download-lighthouses.sh' +``` + +Prepare the data needed for the example: +```shell -$ mkdir -p share/params share/data share/results -$ bash share/scripts/subimagenet.sh --n-total 8 --wnid n02814860 --output share/data/tiny_imagenet -$ curl -o share/params/vgg16-397923af.pth 'https://download.pytorch.org/models/vgg16-397923af.pth' +$ mkdir params data results +$ bash download-lighthouses.sh --output data/lighthouses +$ curl -o params/vgg16-397923af.pth 'https://download.pytorch.org/models/vgg16-397923af.pth' ``` -This creates the needed directories and downloads the pre-trained vgg16 parameters and a tiny subset of imagenet with the required label-directory structure and 8 samples of class *beacon* (n02814860). +This creates the needed directories and downloads the pre-trained vgg16 parameters and 8 images of lighthouses from Wikimedia Commons into the required label-directory structure for the ImageNet dataset in PyTorch. 
-The example at `share/example/feed_forward.py` may then be run using: +The `feed_forward.py` example may then be run using: ```shell -$ python share/example/feed_forward.py \ - share/data/tiny_imagenet \ - 'share/results/vgg16_epsilon_gamma_box_{sample:02d}.png' \ - --inputs 'share/results/vgg16_input_{sample:02d}.png' \ - --parameters share/params/vgg16-397923af.pth \ +$ .venv/bin/python feed_forward.py \ + data/lighthouses \ + 'results/vgg16_epsilon_gamma_box_{sample:02d}.png' \ + --inputs 'results/vgg16_input_{sample:02d}.png' \ + --parameters params/vgg16-397923af.pth \ --model vgg16 \ --composite epsilon_gamma_box ``` -which computes the lrp heatmaps according to the `epsilon_gamma_box` rule and stores them in `share/results`, along with the respective input images. +which computes the lrp heatmaps according to the `epsilon_gamma_box` rule and stores them in `results`, along with the respective input images. The resulting heatmaps may look like the following: -![beacon heatmaps](https://raw.githubusercontent.com/chr5tphr/zennit/d3934e974bb7c685fc929786d6fc653474fbbc98/share/img/beacon_vgg16_epsilon_gamma_box.png) +![beacon heatmaps](https://raw.githubusercontent.com/chr5tphr/zennit/master/share/img/beacon_vgg16_epsilon_gamma_box.png) The following is a slightly modified exerpt of `share/example/feed_forward.py`: ```python diff --git a/share/img/beacon_vgg16_epsilon_gamma_box.png b/share/img/beacon_vgg16_epsilon_gamma_box.png index 28f3e89..757b825 100644 Binary files a/share/img/beacon_vgg16_epsilon_gamma_box.png and b/share/img/beacon_vgg16_epsilon_gamma_box.png differ diff --git a/share/scripts/subimagenet.sh b/share/scripts/download-lighthouses.sh similarity index 89% rename from share/scripts/subimagenet.sh rename to share/scripts/download-lighthouses.sh index db96748..58543b0 100644 --- a/share/scripts/subimagenet.sh +++ b/share/scripts/download-lighthouses.sh @@ -94,6 +94,18 @@ wnids=( n12768682 n12985857 n12998815 n13037406 n13040303 n13044778 
n13052670 n13054560 n13133613 n15075141 ) +wnid="n02814860" + +URLS=( + 'https://upload.wikimedia.org/wikipedia/commons/thumb/8/8b/2006_09_06_180_Leuchtturm.jpg/640px-2006_09_06_180_Leuchtturm.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/5/5b/2014_Leuchtturm_Kap_Arkona_02.jpg/320px-2014_Leuchtturm_Kap_Arkona_02.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/d/d5/2013-12-06_Orkan_Xaver_in_Warnem%C3%BCnde_12.jpg/640px-2013-12-06_Orkan_Xaver_in_Warnem%C3%BCnde_12.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/3/37/Leuchtturm_Dornbusch_2012.JPG/321px-Leuchtturm_Dornbusch_2012.JPG' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/0/0d/Neuer_Leuchtturm_Arkona_2012.jpg/640px-Neuer_Leuchtturm_Arkona_2012.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/7/74/Pilsumer_Leuchtturm_2010-10_CN-I.jpg/640px-Pilsumer_Leuchtturm_2010-10_CN-I.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/1/13/Lindau_Harbor_Lake_Constance_01.jpg/640px-Lindau_Harbor_Lake_Constance_01.jpg' + 'https://upload.wikimedia.org/wikipedia/commons/thumb/f/fd/Heligoland_07-2016_photo28.jpg/640px-Heligoland_07-2016_photo28.jpg' +) process_file() { mime="$(file --brief --mime-type "${1}")" @@ -116,18 +128,16 @@ die() { usage() { cat </dev/null || die "curl not available!" 1 command -v file >/dev/null || die "file not available!" 1 mkdir -p "${wnids[@]/#/"$output/"}" -echo -ne "Fetching URLs for \x1b[1m${wnid}\x1b[0m..." -fetchtmp="${output}/${wnid}/urls.temp" -if ! curl \ - 'http://www.image-net.org/api/text/imagenet.synset.geturls' \ - --silent \ - --get \ - --data-urlencode "wnid=${wnid}" \ - --output "${fetchtmp}"; then - echo - rm -f "$fetchtmp" - die "Failed to fetch URLs!" 
1 -fi -echo -e "received \x1b[1m$(wc -l "$fetchtmp" | cut -d' ' -f1)\x1b[0m URLs" - -mapfile -t urls < <( shuf "$fetchtmp" | tr -d '\r' ) -rm -f "$fetchtmp" echo -n "Downloading: " n_loaded=0 -for url in "${urls[@]}"; do +for url in "${URLS[@]}"; do fname="${output}/${wnid}/image.temp" curl "${url}" --silent --location --connect-timeout 3 --max-time 10 --output "${fname}" if process_file "$fname"; then @@ -205,10 +187,6 @@ for url in "${urls[@]}"; do else echo -ne "\x1b[31m.\x1b[0m" fi - - if (( n_loaded >= n_total )); then - break; - fi done echo -e "\nDownloaded ${n_loaded} images to '$(readlink -f "${output}")'"