Commit: merge from main

Aske-Rosted committed Oct 27, 2023
2 parents 6654759 + d1b97d5 commit 93038c6
Showing 82 changed files with 1,274 additions and 571 deletions.
43 changes: 38 additions & 5 deletions .github/workflows/build.yml
@@ -36,14 +36,24 @@ jobs:
name: Unit tests - IceTray
needs: [ check-codeclimate-credentials ]
runs-on: ubuntu-latest
container: icecube/icetray:combo-stable
container:
image: icecube/icetray:icetray-prod-v1.8.1-ubuntu20.04-X64
options: --user root
steps:
- name: install git
run: |
apt-get --yes install sudo
sudo apt update --fix-missing --yes
sudo apt upgrade --yes
sudo apt-get install --yes git-all
- name: Set environment variables
run: |
echo "PATH=/usr/local/icetray/bin:$PATH" >> $GITHUB_ENV
echo "PYTHONPATH=/usr/local/icetray/lib:$PYTHONPATH" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/local/icetray/lib:/usr/local/icetray/cernroot/lib:/usr/local/icetray/lib/tools:$LD_LIBRARY_PATH" >> $GITHUB_ENV
- uses: actions/checkout@v3
- name: Print available disk space before graphnet install
run: df -h
- name: Upgrade packages already installed on icecube/icetray
run: |
pip install --upgrade astropy # Installed version incompatible with numpy 1.23.0 [https://github.com/astropy/astropy/issues/12534]
@@ -55,8 +65,10 @@ jobs:
editable: true
- name: Run unit tests and generate coverage report
run: |
coverage run --source=graphnet -m pytest tests/
coverage run --source=graphnet -m pytest tests/ --ignore=tests/examples/04_training
coverage run -a --source=graphnet -m pytest tests/examples/04_training
coverage xml -o coverage.xml
- name: Work around permission issue
run: |
git config --global --add safe.directory /__w/graphnet/graphnet
@@ -81,15 +93,29 @@ jobs:
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Print available disk space before graphnet install
run: df -h
- name: Install package
uses: ./.github/actions/install
with:
editable: true
- name: Print available disk space after graphnet install
run: df -h
- name: Print packages in pip
run: |
pip show torch
pip show torch-geometric
pip show torch-cluster
pip show torch-sparse
pip show torch-scatter
- name: Run unit tests and generate coverage report
run: |
set -o pipefail # To propagate exit code from pytest
coverage run --source=graphnet -m pytest tests/ --ignore=tests/data/ --ignore=tests/deployment/
coverage run --source=graphnet -m pytest tests/ --ignore=tests/utilities --ignore=tests/data/ --ignore=tests/deployment/ --ignore=tests/examples/01_icetray/
coverage run -a --source=graphnet -m pytest tests/utilities
coverage report -m
- name: Print available disk space after unit tests
run: df -h

build-macos:
name: Unit tests - macOS
@@ -105,8 +131,15 @@
with:
editable: true
hardware: "macos"
- name: Print packages in pip
run: |
pip show torch
pip show torch-geometric
pip show torch-cluster
pip show torch-sparse
pip show torch-scatter
- name: Run unit tests and generate coverage report
run: |
set -o pipefail # To propagate exit code from pytest
coverage run --source=graphnet -m pytest tests/ --ignore=tests/data/ --ignore=tests/deployment/
coverage report -m
coverage run --source=graphnet -m pytest tests/ --ignore=tests/data/ --ignore=tests/deployment/ --ignore=tests/examples/
coverage report -m
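The build.yml hunks above split the unit tests into two `coverage run` invocations, with the second one appending (`-a`) to the same coverage data so that a single combined report is produced. As a minimal illustration only (not how the CI job itself executes), the same accumulate-then-report pattern can be sketched with the `coverage` and `pytest` Python APIs; the test paths are copied from the workflow above:

```python
# Sketch of the split coverage pattern used in build.yml: two pytest passes
# accumulated into one data set, then written out as a single XML report.
import coverage
import pytest

cov = coverage.Coverage(source=["graphnet"])
cov.start()

# First pass: everything except the training examples.
pytest.main(["tests/", "--ignore=tests/examples/04_training"])
# Second pass: the training examples, added to the same coverage data.
pytest.main(["tests/examples/04_training"])

cov.stop()
cov.save()
cov.xml_report(outfile="coverage.xml")  # same file name as in the workflow step
```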
22 changes: 22 additions & 0 deletions .github/workflows/docker.yml
@@ -19,6 +19,28 @@ jobs:
packages: write
contents: read
steps:
- name: Before Clean-up
run: |
echo "Free space:"
df -h
- name: Free Disk Space
uses: jlumbroso/free-disk-space@main
with:
tool-cache: true

# all of these default to true, but feel free to set to
# false if necessary for your workflow
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true

- name: After Clean-up
run: |
echo "Free space:"
df -h
- name: Checkout
uses: actions/checkout@v3
- name: Set up QEMU
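The docker.yml job above prints the available disk space with `df -h` before and after the clean-up action. Purely as a hypothetical aside, the same check can be done from Python with the standard library:

```python
# Hypothetical helper mirroring the `df -h` steps above (standard library only).
import shutil

def print_free_space(path: str = "/") -> None:
    """Print total, used and free space for the filesystem containing `path`."""
    total, used, free = shutil.disk_usage(path)
    gib = 2 ** 30
    print(f"{path}: total={total / gib:.1f} GiB, "
          f"used={used / gib:.1f} GiB, free={free / gib:.1f} GiB")

print_free_space()
```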
1 change: 0 additions & 1 deletion README.md
@@ -82,7 +82,6 @@ $ conda create --name graphnet python=3.8 gcc_linux-64 gxx_linux-64 libgcc cudat
$ conda activate graphnet # Optional
(graphnet) $ pip install -r requirements/torch_cpu.txt -e .[develop,torch] # CPU-only torch
(graphnet) $ pip install -r requirements/torch_gpu.txt -e .[develop,torch] # GPU support
(graphnet) $ pip install -r requirements/torch_macos.txt -e .[develop,torch] # On macOS
```
This should allow you to e.g. run the scripts in [examples/](./examples/) out of the box.

@@ -10,7 +10,7 @@ graph_definition:
node_definition:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [dom_x, dom_y, dom_z, dom_time, charge, rde, pmt_area]
input_feature_names: [dom_x, dom_y, dom_z, dom_time, charge, rde, pmt_area]
class_name: KNNGraph
pulsemaps:
- SRTTWOfflinePulsesDC
2 changes: 1 addition & 1 deletion configs/datasets/test_data_sqlite.yml
@@ -10,7 +10,7 @@ graph_definition:
node_definition:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
index_column: event_no
loss_weight_column: null
@@ -10,7 +10,7 @@ graph_definition:
node_definition:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
pulsemaps:
- total
2 changes: 1 addition & 1 deletion configs/datasets/training_example_data_parquet.yml
@@ -10,7 +10,7 @@ graph_definition:
node_definition:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
pulsemaps:
- total
2 changes: 1 addition & 1 deletion configs/datasets/training_example_data_sqlite.yml
@@ -10,7 +10,7 @@ graph_definition:
node_definition:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
pulsemaps:
- total
3 changes: 2 additions & 1 deletion configs/models/dynedge_PID_classification_example.yml
@@ -25,7 +25,7 @@ arguments:
ModelConfig:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
optimizer_class: '!class torch.optim.adam Adam'
optimizer_kwargs: {eps: 0.001, lr: 0.001}
@@ -38,6 +38,7 @@ arguments:
- ModelConfig:
arguments:
nb_outputs: 3 # number of classes
prediction_labels: ['noise', 'muon', 'neutrino']
hidden_size: 128
loss_function:
ModelConfig:
2 changes: 1 addition & 1 deletion configs/models/dynedge_position_custom_scaling_example.yml
@@ -17,7 +17,7 @@ arguments:
ModelConfig:
arguments: {}
class_name: NodesAsPulses
node_feature_names: null
input_feature_names: null
class_name: KNNGraph
gnn:
ModelConfig:
44 changes: 0 additions & 44 deletions configs/models/dynedge_position_example.yml

This file was deleted.

2 changes: 1 addition & 1 deletion configs/models/example_direction_reconstruction_model.yml
@@ -13,7 +13,7 @@ arguments:
ModelConfig:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
gnn:
ModelConfig:
2 changes: 1 addition & 1 deletion configs/models/example_energy_reconstruction_model.yml
@@ -25,7 +25,7 @@ arguments:
ModelConfig:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
optimizer_class: '!class torch.optim.adam Adam'
optimizer_kwargs: {eps: 0.001, lr: 0.001}
@@ -25,7 +25,7 @@ arguments:
ModelConfig:
arguments: {}
class_name: NodesAsPulses
node_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
input_feature_names: [sensor_pos_x, sensor_pos_y, sensor_pos_z, t]
class_name: KNNGraph
optimizer_class: '!class torch.optim.adam Adam'
optimizer_kwargs: {eps: 0.001, lr: 0.001}
23 changes: 14 additions & 9 deletions examples/01_icetray/01_convert_i3_files.py
@@ -5,9 +5,9 @@
from graphnet.constants import EXAMPLE_OUTPUT_DIR, TEST_DATA_DIR
from graphnet.data.extractors import (
I3FeatureExtractorIceCubeUpgrade,
I3FeatureExtractorIceCube86,
I3RetroExtractor,
I3TruthExtractor,
I3GenericExtractor,
)
from graphnet.data.dataconverter import DataConverter
from graphnet.data.parquet import ParquetDataConverter
@@ -16,7 +16,7 @@
from graphnet.utilities.imports import has_icecube_package
from graphnet.utilities.logging import Logger

from _common_icetray import ERROR_MESSAGE_MISSING_ICETRAY
ERROR_MESSAGE_MISSING_ICETRAY = (
"This example requires IceTray to be installed, which doesn't seem to be "
"the case. Please install IceTray; run this example in the GraphNeT "
"Docker container which comes with IceTray installed; or run an example "
"script in one of the other folders:"
"\n * examples/02_data/"
"\n * examples/03_weights/"
"\n * examples/04_training/"
"\n * examples/05_pisa/"
"\nExiting."
)

CONVERTER_CLASS = {
"sqlite": SQLiteDataConverter,
@@ -34,12 +44,7 @@ def main_icecube86(backend: str) -> None:

converter: DataConverter = CONVERTER_CLASS[backend](
[
I3GenericExtractor(
keys=[
"SRTInIcePulses",
"I3MCTree",
]
),
I3FeatureExtractorIceCube86("SRTInIcePulses"),
I3TruthExtractor(),
],
outdir,
@@ -94,7 +99,7 @@ def main_icecube_upgrade(backend: str) -> None:
"detector", choices=["icecube-86", "icecube-upgrade"]
)

args = parser.parse_args()
args, unknown = parser.parse_known_args()

# Run example script
if args.detector == "icecube-86":
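The example scripts touched in this commit switch from `parser.parse_args()` to `parser.parse_known_args()`, which returns unrecognised command-line arguments instead of exiting with an error (useful when a test runner passes extra flags to a script). A minimal, self-contained illustration with made-up arguments:

```python
# Minimal illustration of parse_known_args(); the argument values are made up.
import argparse

parser = argparse.ArgumentParser(description="parse_known_args demo")
parser.add_argument("backend", choices=["sqlite", "parquet"])

# parse_args() would exit with an error on "--unexpected-flag";
# parse_known_args() returns it in the `unknown` list instead.
args, unknown = parser.parse_known_args(["sqlite", "--unexpected-flag"])
print(args.backend)  # -> sqlite
print(unknown)       # -> ['--unexpected-flag']
```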
16 changes: 13 additions & 3 deletions examples/01_icetray/02_compare_sqlite_and_parquet.py
@@ -16,7 +16,7 @@
from graphnet.utilities.imports import has_icecube_package
from graphnet.utilities.logging import Logger

from _common_icetray import ERROR_MESSAGE_MISSING_ICETRAY
ERROR_MESSAGE_MISSING_ICETRAY = (
"This example requires IceTray to be installed, which doesn't seem to be "
"the case. Please install IceTray; run this example in the GraphNeT "
"Docker container which comes with IceTray installed; or run an example "
"script in one of the other folders:"
"\n * examples/02_data/"
"\n * examples/03_weights/"
"\n * examples/04_training/"
"\n * examples/05_pisa/"
"\nExiting."
)

OUTPUT_DIR = f"{EXAMPLE_OUTPUT_DIR}/compare_sqlite_and_parquet"
PULSEMAP = "SRTInIcePulses"
@@ -34,7 +44,7 @@ def convert_data() -> None:
I3FeatureExtractorIceCube86(PULSEMAP),
],
outdir=OUTPUT_DIR,
workers=10,
workers=1,
)

# Run data converters.
@@ -85,7 +95,7 @@ def load_data() -> None:
"""
)

args = parser.parse_args()
args, unknown = parser.parse_known_args()

# Run example script(s)
convert_data()
14 changes: 12 additions & 2 deletions examples/01_icetray/03_i3_deployer_example.py
@@ -23,7 +23,17 @@
I3InferenceModule,
)

from _common_icetray import ERROR_MESSAGE_MISSING_ICETRAY
ERROR_MESSAGE_MISSING_ICETRAY = (
"This example requires IceTray to be installed, which doesn't seem to be "
"the case. Please install IceTray; run this example in the GraphNeT "
"Docker container which comes with IceTray installed; or run an example "
"script in one of the other folders:"
"\n * examples/02_data/"
"\n * examples/03_weights/"
"\n * examples/04_training/"
"\n * examples/05_pisa/"
"\nExiting."
)

# Constants
features = FEATURES.UPGRADE
@@ -83,7 +93,7 @@ def main() -> None:
"""
)

args = parser.parse_args()
args, unknown = parser.parse_known_args()

# Run example script
main()
Loading

0 comments on commit 93038c6

Please sign in to comment.