From 01c027b51209ffd59653c632b1aeefa002eca73c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20S=C3=B8gaard?= Date: Mon, 13 May 2024 06:01:45 +0000 Subject: [PATCH] =?UTF-8?q?Deploying=20to=20gh-pages=20from=20@=20Aske-Ros?= =?UTF-8?q?ted/graphnet@6c8878667d833ad386ca756ebd4a9d5b12987e57=20?= =?UTF-8?q?=F0=9F=9A=80?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .buildinfo | 2 +- _images/dataconverter.svg | 74 + _images/datarepresentation.svg | 280 ++++ _images/favicon.svg | 118 ++ _images/model.svg | 38 + _images/standardmodel.svg | 148 ++ _modules/graphnet/data/constants.html | 53 +- .../graphnet/data/curated_datamodule.html | 684 ++++++++ _modules/graphnet/data/dataclasses.html | 37 +- _modules/graphnet/data/dataconverter.html | 96 +- _modules/graphnet/data/dataloader.html | 39 +- _modules/graphnet/data/datamodule.html | 151 +- _modules/graphnet/data/dataset/dataset.html | 124 +- .../data/dataset/parquet/parquet_dataset.html | 512 ++++-- .../data/dataset/sqlite/sqlite_dataset.html | 59 +- .../data/extractors/combine_extractors.html | 428 +++++ .../graphnet/data/extractors/extractor.html | 42 +- .../data/extractors/icecube/i3extractor.html | 37 +- .../icecube/i3featureextractor.html | 37 +- .../icecube/i3genericextractor.html | 37 +- .../icecube/i3hybridrecoextractor.html | 37 +- .../icecube/i3ntmuonlabelsextractor.html | 37 +- .../icecube/i3particleextractor.html | 37 +- .../extractors/icecube/i3pisaextractor.html | 37 +- .../extractors/icecube/i3quesoextractor.html | 37 +- .../extractors/icecube/i3retroextractor.html | 37 +- .../icecube/i3splinempeextractor.html | 37 +- .../extractors/icecube/i3truthextractor.html | 37 +- .../extractors/icecube/i3tumextractor.html | 37 +- .../icecube/utilities/collections.html | 37 +- .../extractors/icecube/utilities/frames.html | 37 +- .../icecube/utilities/i3_filters.html | 37 +- .../extractors/icecube/utilities/types.html | 37 +- .../internal/parquet_extractor.html | 424 +++++ .../data/extractors/liquido/h5_extractor.html | 488 ++++++ .../prometheus/prometheus_extractor.html | 488 ++++++ .../data/parquet/deprecated_methods.html | 37 +- _modules/graphnet/data/pipeline.html | 595 ------- .../data/pre_configured/dataconverters.html | 79 +- .../data/readers/graphnet_file_reader.html | 79 +- _modules/graphnet/data/readers/i3reader.html | 51 +- .../data/readers/internal_parquet_reader.html | 448 ++++++ .../graphnet/data/readers/liquido_reader.html | 442 ++++++ .../data/readers/prometheus_reader.html | 456 ++++++ .../data/sqlite/deprecated_methods.html | 37 +- .../data/utilities/parquet_to_sqlite.html | 549 ------- _modules/graphnet/data/utilities/random.html | 37 +- .../data/utilities/sqlite_utilities.html | 45 +- .../utilities/string_selection_resolver.html | 37 +- .../data/writers/graphnet_writer.html | 37 +- .../graphnet/data/writers/parquet_writer.html | 265 +++- .../graphnet/data/writers/sqlite_writer.html | 50 +- .../datasets/prometheus_datasets.html | 543 +++++++ _modules/graphnet/datasets/test_dataset.html | 454 ++++++ _modules/graphnet/deployment/deployer.html | 37 +- .../deployment/deployment_module.html | 37 +- .../deployment/icecube/cleaning_module.html | 37 +- .../deployment/icecube/inference_module.html | 37 +- _modules/graphnet/exceptions/exceptions.html | 395 +++++ _modules/graphnet/models/coarsening.html | 37 +- .../graphnet/models/components/embedding.html | 107 +- .../graphnet/models/components/layers.html | 37 +- _modules/graphnet/models/components/pool.html | 37 +- 
.../graphnet/models/detector/detector.html | 37 +- .../graphnet/models/detector/icecube.html | 44 +- .../graphnet/models/detector/liquido.html | 428 +++++ .../graphnet/models/detector/prometheus.html | 437 ++++- _modules/graphnet/models/gnn/RNN_tito.html | 37 +- _modules/graphnet/models/gnn/convnet.html | 37 +- _modules/graphnet/models/gnn/dynedge.html | 47 +- .../graphnet/models/gnn/dynedge_jinst.html | 37 +- .../models/gnn/dynedge_kaggle_tito.html | 37 +- _modules/graphnet/models/gnn/gnn.html | 37 +- _modules/graphnet/models/gnn/icemix.html | 55 +- .../graphnet/models/graphs/edges/edges.html | 37 +- .../models/graphs/edges/minkowski.html | 37 +- .../models/graphs/graph_definition.html | 37 +- _modules/graphnet/models/graphs/graphs.html | 37 +- .../graphnet/models/graphs/nodes/nodes.html | 136 +- _modules/graphnet/models/graphs/utils.html | 54 +- _modules/graphnet/models/model.html | 37 +- _modules/graphnet/models/rnn/node_rnn.html | 37 +- .../models/standard_averaged_model.html | 37 +- _modules/graphnet/models/standard_model.html | 37 +- .../graphnet/models/task/classification.html | 37 +- .../graphnet/models/task/reconstruction.html | 37 +- _modules/graphnet/models/task/task.html | 37 +- .../graphnet/models/transformer/iseecube.html | 495 ++++++ _modules/graphnet/models/utils.html | 37 +- _modules/graphnet/pisa/fitting.html | 1183 -------------- _modules/graphnet/pisa/plotting.html | 542 ------- _modules/graphnet/training/callbacks.html | 37 +- _modules/graphnet/training/labels.html | 76 +- .../graphnet/training/loss_functions.html | 37 +- _modules/graphnet/training/utils.html | 43 +- .../graphnet/training/weight_fitting.html | 37 +- _modules/graphnet/utilities/argparse.html | 37 +- .../utilities/config/base_config.html | 37 +- .../utilities/config/configurable.html | 37 +- .../utilities/config/dataset_config.html | 37 +- .../utilities/config/model_config.html | 37 +- .../graphnet/utilities/config/parsing.html | 37 +- .../utilities/config/training_config.html | 37 +- .../graphnet/utilities/deprecation_tools.html | 37 +- _modules/graphnet/utilities/filesys.html | 40 +- _modules/graphnet/utilities/imports.html | 53 +- _modules/graphnet/utilities/logging.html | 37 +- _modules/graphnet/utilities/maths.html | 37 +- _modules/index.html | 54 +- _sources/about.md.txt | 48 - _sources/about/about.rst.txt | 35 + .../graphnet.data.curated_datamodule.rst.txt | 8 + ...data.extractors.combine_extractors.rst.txt | 8 + ...ractors.internal.parquet_extractor.rst.txt | 8 + .../graphnet.data.extractors.internal.rst.txt | 36 + ...ta.extractors.liquido.h5_extractor.rst.txt | 8 + .../graphnet.data.extractors.liquido.rst.txt | 36 + ...rs.prometheus.prometheus_extractor.rst.txt | 8 + ...raphnet.data.extractors.prometheus.rst.txt | 36 + _sources/api/graphnet.data.extractors.rst.txt | 4 + ...ta.readers.internal_parquet_reader.rst.txt | 8 + ...aphnet.data.readers.liquido_reader.rst.txt | 8 + ...net.data.readers.prometheus_reader.rst.txt | 8 + _sources/api/graphnet.data.readers.rst.txt | 3 + _sources/api/graphnet.data.rst.txt | 2 +- ...phnet.datasets.prometheus_datasets.rst.txt | 8 + _sources/api/graphnet.datasets.rst.txt | 37 + .../graphnet.datasets.test_dataset.rst.txt | 8 + .../graphnet.exceptions.exceptions.rst.txt | 8 + ...sa.rst.txt => graphnet.exceptions.rst.txt} | 9 +- ... 
graphnet.models.detector.liquido.rst.txt} | 4 +- _sources/api/graphnet.models.detector.rst.txt | 1 + _sources/api/graphnet.models.rst.txt | 1 + ...phnet.models.transformer.iseecube.rst.txt} | 4 +- .../api/graphnet.models.transformer.rst.txt | 36 + _sources/api/graphnet.pisa.plotting.rst.txt | 8 - _sources/api/graphnet.rst.txt | 3 +- _sources/contribute.md.txt | 37 - _sources/contribute/contribute.rst.txt | 54 + .../data_conversion/data_conversion.rst.txt | 278 ++++ _sources/datasets/datasets.rst.txt | 368 +++++ .../getting_started/getting_started.md.txt | 695 ++++++++ _sources/index.rst.txt | 20 +- _sources/install.md.txt | 82 - _sources/installation/install.rst.txt | 46 + _sources/integration/integration.rst.txt | 261 +++ _sources/intro/intro.rst.txt | 40 + _sources/models/models.rst.txt | 515 ++++++ _sources/substitutions.rst.txt | 15 + _static/basic.css | 2 +- _static/doctools.js | 2 +- _static/language_data.js | 4 +- _static/searchtools.js | 165 +- about/about.html | 414 +++++ api/graphnet.constants.html | 48 +- api/graphnet.data.constants.html | 96 +- api/graphnet.data.curated_datamodule.html | 947 +++++++++++ api/graphnet.data.dataclasses.html | 72 +- api/graphnet.data.dataconverter.html | 71 +- api/graphnet.data.dataloader.html | 78 +- api/graphnet.data.datamodule.html | 77 +- api/graphnet.data.dataset.dataset.html | 91 +- api/graphnet.data.dataset.html | 55 +- api/graphnet.data.dataset.parquet.html | 54 +- ....data.dataset.parquet.parquet_dataset.html | 114 +- api/graphnet.data.dataset.sqlite.html | 54 +- ...et.data.dataset.sqlite.sqlite_dataset.html | 66 +- ...t.data.extractors.combine_extractors.html} | 202 ++- api/graphnet.data.extractors.extractor.html | 88 +- api/graphnet.data.extractors.html | 98 +- api/graphnet.data.extractors.icecube.html | 82 +- ...t.data.extractors.icecube.i3extractor.html | 82 +- ...extractors.icecube.i3featureextractor.html | 82 +- ...extractors.icecube.i3genericextractor.html | 84 +- ...ractors.icecube.i3hybridrecoextractor.html | 84 +- ...ctors.icecube.i3ntmuonlabelsextractor.html | 86 +- ...xtractors.icecube.i3particleextractor.html | 84 +- ...ta.extractors.icecube.i3pisaextractor.html | 84 +- ...a.extractors.icecube.i3quesoextractor.html | 86 +- ...a.extractors.icecube.i3retroextractor.html | 84 +- ...tractors.icecube.i3splinempeextractor.html | 84 +- ...a.extractors.icecube.i3truthextractor.html | 82 +- ...ata.extractors.icecube.i3tumextractor.html | 90 +- ...ractors.icecube.utilities.collections.html | 86 +- ...a.extractors.icecube.utilities.frames.html | 94 +- ...net.data.extractors.icecube.utilities.html | 82 +- ...tractors.icecube.utilities.i3_filters.html | 106 +- ...ta.extractors.icecube.utilities.types.html | 104 +- api/graphnet.data.extractors.internal.html | 640 ++++++++ ...extractors.internal.parquet_extractor.html | 673 ++++++++ ....data.extractors.liquido.h5_extractor.html | 713 +++++++++ api/graphnet.data.extractors.liquido.html | 642 ++++++++ api/graphnet.data.extractors.prometheus.html | 642 ++++++++ ...ctors.prometheus.prometheus_extractor.html | 724 +++++++++ api/graphnet.data.html | 70 +- ...phnet.data.parquet.deprecated_methods.html | 54 +- api/graphnet.data.parquet.html | 54 +- ...et.data.pre_configured.dataconverters.html | 88 +- api/graphnet.data.pre_configured.html | 55 +- ...net.data.readers.graphnet_file_reader.html | 87 +- api/graphnet.data.readers.html | 87 +- api/graphnet.data.readers.i3reader.html | 81 +- ...data.readers.internal_parquet_reader.html} | 310 ++-- api/graphnet.data.readers.liquido_reader.html | 687 
++++++++ ...aphnet.data.readers.prometheus_reader.html | 687 ++++++++ ...aphnet.data.sqlite.deprecated_methods.html | 54 +- api/graphnet.data.sqlite.html | 60 +- api/graphnet.data.utilities.html | 59 +- ...hnet.data.utilities.parquet_to_sqlite.html | 125 +- api/graphnet.data.utilities.random.html | 56 +- ...phnet.data.utilities.sqlite_utilities.html | 78 +- ...a.utilities.string_selection_resolver.html | 64 +- ...graphnet.data.writers.graphnet_writer.html | 64 +- api/graphnet.data.writers.html | 54 +- api/graphnet.data.writers.parquet_writer.html | 91 +- api/graphnet.data.writers.sqlite_writer.html | 62 +- api/graphnet.datasets.html | 512 ++++++ ...graphnet.datasets.prometheus_datasets.html | 666 ++++++++ api/graphnet.datasets.test_dataset.html | 547 +++++++ api/graphnet.deployment.deployer.html | 48 +- ...graphnet.deployment.deployment_module.html | 54 +- api/graphnet.deployment.html | 54 +- ...ployment.i3modules.deprecated_methods.html | 48 +- api/graphnet.deployment.i3modules.html | 48 +- ...et.deployment.icecube.cleaning_module.html | 48 +- api/graphnet.deployment.icecube.html | 48 +- ...raphnet.deployment.icecube.i3deployer.html | 48 +- ...t.deployment.icecube.inference_module.html | 48 +- api/graphnet.exceptions.exceptions.html | 517 ++++++ ...net.pisa.html => graphnet.exceptions.html} | 89 +- api/graphnet.html | 72 +- api/graphnet.models.coarsening.html | 93 +- api/graphnet.models.components.embedding.html | 86 +- api/graphnet.models.components.html | 66 +- api/graphnet.models.components.layers.html | 115 +- api/graphnet.models.components.pool.html | 93 +- api/graphnet.models.detector.detector.html | 80 +- api/graphnet.models.detector.html | 87 +- api/graphnet.models.detector.icecube.html | 154 +- api/graphnet.models.detector.liquido.html | 720 +++++++++ api/graphnet.models.detector.prometheus.html | 1412 ++++++++++++++++- api/graphnet.models.gnn.RNN_tito.html | 71 +- api/graphnet.models.gnn.convnet.html | 71 +- api/graphnet.models.gnn.dynedge.html | 73 +- api/graphnet.models.gnn.dynedge_jinst.html | 71 +- ...aphnet.models.gnn.dynedge_kaggle_tito.html | 71 +- api/graphnet.models.gnn.gnn.html | 71 +- api/graphnet.models.gnn.html | 65 +- api/graphnet.models.gnn.icemix.html | 75 +- api/graphnet.models.graphs.edges.edges.html | 93 +- api/graphnet.models.graphs.edges.html | 65 +- ...raphnet.models.graphs.edges.minkowski.html | 69 +- ...aphnet.models.graphs.graph_definition.html | 69 +- api/graphnet.models.graphs.graphs.html | 75 +- api/graphnet.models.graphs.html | 65 +- api/graphnet.models.graphs.nodes.html | 65 +- api/graphnet.models.graphs.nodes.nodes.html | 97 +- api/graphnet.models.graphs.utils.html | 87 +- api/graphnet.models.html | 77 +- api/graphnet.models.model.html | 87 +- api/graphnet.models.rnn.html | 65 +- api/graphnet.models.rnn.node_rnn.html | 71 +- ...aphnet.models.standard_averaged_model.html | 89 +- api/graphnet.models.standard_model.html | 131 +- api/graphnet.models.task.classification.html | 77 +- api/graphnet.models.task.html | 65 +- api/graphnet.models.task.reconstruction.html | 117 +- api/graphnet.models.task.task.html | 105 +- api/graphnet.models.transformer.html | 583 +++++++ api/graphnet.models.transformer.iseecube.html | 654 ++++++++ api/graphnet.models.utils.html | 79 +- api/graphnet.pisa.fitting.html | 673 -------- api/graphnet.training.callbacks.html | 64 +- api/graphnet.training.html | 55 +- api/graphnet.training.labels.html | 81 +- api/graphnet.training.loss_functions.html | 114 +- api/graphnet.training.utils.html | 150 +- 
api/graphnet.training.weight_fitting.html | 66 +- api/graphnet.utilities.argparse.html | 60 +- ...graphnet.utilities.config.base_config.html | 58 +- ...raphnet.utilities.config.configurable.html | 52 +- ...phnet.utilities.config.dataset_config.html | 84 +- api/graphnet.utilities.config.html | 48 +- ...raphnet.utilities.config.model_config.html | 54 +- api/graphnet.utilities.config.parsing.html | 64 +- ...hnet.utilities.config.training_config.html | 56 +- api/graphnet.utilities.decorators.html | 48 +- api/graphnet.utilities.deprecation_tools.html | 48 +- api/graphnet.utilities.filesys.html | 56 +- api/graphnet.utilities.html | 49 +- api/graphnet.utilities.imports.html | 71 +- api/graphnet.utilities.logging.html | 94 +- api/graphnet.utilities.maths.html | 50 +- api/modules.html | 41 +- contribute.html => contribute/contribute.html | 148 +- data_conversion/data_conversion.html | 690 ++++++++ datasets/datasets.html | 826 ++++++++++ genindex.html | 534 ++++++- getting_started/getting_started.html | 1091 +++++++++++++ index.html | 107 +- install.html | 518 ------ installation/install.html | 625 ++++++++ integration/integration.html | 705 ++++++++ intro/intro.html | 406 +++++ models/models.html | 1018 ++++++++++++ objects.inv | Bin 7789 -> 8955 bytes py-modindex.html | 134 +- search.html | 42 +- searchindex.js | 2 +- sitemap.xml | 2 +- about.html => substitutions.html | 92 +- 311 files changed, 39665 insertions(+), 7557 deletions(-) create mode 100644 _images/dataconverter.svg create mode 100644 _images/datarepresentation.svg create mode 100644 _images/favicon.svg create mode 100644 _images/model.svg create mode 100644 _images/standardmodel.svg create mode 100644 _modules/graphnet/data/curated_datamodule.html create mode 100644 _modules/graphnet/data/extractors/combine_extractors.html create mode 100644 _modules/graphnet/data/extractors/internal/parquet_extractor.html create mode 100644 _modules/graphnet/data/extractors/liquido/h5_extractor.html create mode 100644 _modules/graphnet/data/extractors/prometheus/prometheus_extractor.html delete mode 100644 _modules/graphnet/data/pipeline.html create mode 100644 _modules/graphnet/data/readers/internal_parquet_reader.html create mode 100644 _modules/graphnet/data/readers/liquido_reader.html create mode 100644 _modules/graphnet/data/readers/prometheus_reader.html delete mode 100644 _modules/graphnet/data/utilities/parquet_to_sqlite.html create mode 100644 _modules/graphnet/datasets/prometheus_datasets.html create mode 100644 _modules/graphnet/datasets/test_dataset.html create mode 100644 _modules/graphnet/exceptions/exceptions.html create mode 100644 _modules/graphnet/models/detector/liquido.html create mode 100644 _modules/graphnet/models/transformer/iseecube.html delete mode 100644 _modules/graphnet/pisa/fitting.html delete mode 100644 _modules/graphnet/pisa/plotting.html delete mode 100644 _sources/about.md.txt create mode 100644 _sources/about/about.rst.txt create mode 100644 _sources/api/graphnet.data.curated_datamodule.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.combine_extractors.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.internal.parquet_extractor.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.internal.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.liquido.h5_extractor.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.liquido.rst.txt create mode 100644 _sources/api/graphnet.data.extractors.prometheus.prometheus_extractor.rst.txt create mode 100644 
_sources/api/graphnet.data.extractors.prometheus.rst.txt create mode 100644 _sources/api/graphnet.data.readers.internal_parquet_reader.rst.txt create mode 100644 _sources/api/graphnet.data.readers.liquido_reader.rst.txt create mode 100644 _sources/api/graphnet.data.readers.prometheus_reader.rst.txt create mode 100644 _sources/api/graphnet.datasets.prometheus_datasets.rst.txt create mode 100644 _sources/api/graphnet.datasets.rst.txt create mode 100644 _sources/api/graphnet.datasets.test_dataset.rst.txt create mode 100644 _sources/api/graphnet.exceptions.exceptions.rst.txt rename _sources/api/{graphnet.pisa.rst.txt => graphnet.exceptions.rst.txt} (74%) rename _sources/api/{graphnet.pisa.fitting.rst.txt => graphnet.models.detector.liquido.rst.txt} (52%) rename _sources/api/{graphnet.data.pipeline.rst.txt => graphnet.models.transformer.iseecube.rst.txt} (51%) create mode 100644 _sources/api/graphnet.models.transformer.rst.txt delete mode 100644 _sources/api/graphnet.pisa.plotting.rst.txt delete mode 100644 _sources/contribute.md.txt create mode 100644 _sources/contribute/contribute.rst.txt create mode 100644 _sources/data_conversion/data_conversion.rst.txt create mode 100644 _sources/datasets/datasets.rst.txt create mode 100644 _sources/getting_started/getting_started.md.txt delete mode 100644 _sources/install.md.txt create mode 100644 _sources/installation/install.rst.txt create mode 100644 _sources/integration/integration.rst.txt create mode 100644 _sources/intro/intro.rst.txt create mode 100644 _sources/models/models.rst.txt create mode 100644 _sources/substitutions.rst.txt create mode 100644 about/about.html create mode 100644 api/graphnet.data.curated_datamodule.html rename api/{graphnet.data.pipeline.html => graphnet.data.extractors.combine_extractors.html} (70%) create mode 100644 api/graphnet.data.extractors.internal.html create mode 100644 api/graphnet.data.extractors.internal.parquet_extractor.html create mode 100644 api/graphnet.data.extractors.liquido.h5_extractor.html create mode 100644 api/graphnet.data.extractors.liquido.html create mode 100644 api/graphnet.data.extractors.prometheus.html create mode 100644 api/graphnet.data.extractors.prometheus.prometheus_extractor.html rename api/{graphnet.pisa.plotting.html => graphnet.data.readers.internal_parquet_reader.html} (54%) create mode 100644 api/graphnet.data.readers.liquido_reader.html create mode 100644 api/graphnet.data.readers.prometheus_reader.html create mode 100644 api/graphnet.datasets.html create mode 100644 api/graphnet.datasets.prometheus_datasets.html create mode 100644 api/graphnet.datasets.test_dataset.html create mode 100644 api/graphnet.exceptions.exceptions.html rename api/{graphnet.pisa.html => graphnet.exceptions.html} (83%) create mode 100644 api/graphnet.models.detector.liquido.html create mode 100644 api/graphnet.models.transformer.html create mode 100644 api/graphnet.models.transformer.iseecube.html delete mode 100644 api/graphnet.pisa.fitting.html rename contribute.html => contribute/contribute.html (68%) create mode 100644 data_conversion/data_conversion.html create mode 100644 datasets/datasets.html create mode 100644 getting_started/getting_started.html delete mode 100644 install.html create mode 100644 installation/install.html create mode 100644 integration/integration.html create mode 100644 intro/intro.html create mode 100644 models/models.html rename about.html => substitutions.html (56%) diff --git a/.buildinfo b/.buildinfo index 67510ee00..e8e38a82f 100644 --- a/.buildinfo +++ b/.buildinfo @@ -1,4 
+1,4 @@ # Sphinx build info version 1 # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: 38e647a1ad5b910b669ed012bb05995f +config: 93489d55c3e099bd65b6f8c23347c61a tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/_images/dataconverter.svg b/_images/dataconverter.svg new file mode 100644 index 000000000..b30246132 --- /dev/null +++ b/_images/dataconverter.svg @@ -0,0 +1,74 @@ [SVG markup stripped in extraction: 74 added lines]
diff --git a/_images/datarepresentation.svg b/_images/datarepresentation.svg new file mode 100644 index 000000000..775afef71 --- /dev/null +++ b/_images/datarepresentation.svg @@ -0,0 +1,280 @@ [SVG markup stripped in extraction: 280 added lines]
diff --git a/_images/favicon.svg b/_images/favicon.svg new file mode 100644 index 000000000..f384f90af --- /dev/null +++ b/_images/favicon.svg @@ -0,0 +1,118 @@ [SVG markup stripped in extraction: 118 added lines]
diff --git a/_images/model.svg b/_images/model.svg new file mode 100644 index 000000000..8d09ede64 --- /dev/null +++ b/_images/model.svg @@ -0,0 +1,38 @@ [SVG markup stripped in extraction: 38 added lines]
diff --git a/_images/standardmodel.svg b/_images/standardmodel.svg new file mode 100644 index 000000000..571ac438d --- /dev/null +++ b/_images/standardmodel.svg @@ -0,0 +1,148 @@ [SVG markup stripped in extraction: 148 added lines]
diff --git a/_modules/graphnet/data/constants.html b/_modules/graphnet/data/constants.html index 9b2cbe685..c276bb049 100644 --- a/_modules/graphnet/data/constants.html +++ b/_modules/graphnet/data/constants.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -355,7 +382,8 @@

    Source code for graphnet.dat "sensor_pos_z", "t", ] - KAGGLE = ["x", "y", "z", "time", "charge", "auxiliary"] + KAGGLE = ["x", "y", "z", "time", "charge", "auxiliary"] + LIQUIDO = ["sipm_x", "sipm_y", "sipm_z", "t"] @@ -375,7 +403,6 @@

    Source code for graphnet.dat "zenith", "pid", "elasticity", - "sim_type", "interaction_type", "interaction_time", # Added for vertex reconstruction "inelasticity", @@ -411,7 +438,17 @@

    Source code for graphnet.dat "primary_hadron_1_energy", "total_energy", ] - KAGGLE = ["zenith", "azimuth"] + KAGGLE = ["zenith", "azimuth"] + LIQUIDO = [ + "vertex_x", + "vertex_y", + "vertex_z", + "zenith", + "azimuth", + "interaction_time", + "energy", + "pid", + ] @@ -437,7 +474,7 @@
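The two hunks above register LiquidO entries in the feature and truth constants. A minimal usage sketch, assuming the `FEATURES`/`TRUTH` container classes in `graphnet.data.constants` (the module shown in this diff); the training code around such a call is not part of the patch:

    from graphnet.data.constants import FEATURES, TRUTH

    # Detector-specific input features and event-level truth, as added above.
    features = FEATURES.LIQUIDO  # ["sipm_x", "sipm_y", "sipm_z", "t"]
    truth = TRUTH.LIQUIDO        # ["vertex_x", ..., "energy", "pid"]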

    Source code for graphnet.dat Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/curated_datamodule.html b/_modules/graphnet/data/curated_datamodule.html new file mode 100644 index 000000000..c336e86b7 --- /dev/null +++ b/_modules/graphnet/data/curated_datamodule.html @@ -0,0 +1,684 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.data.curated_datamodule — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.data.curated_datamodule

    +"""Contains a Generic class for curated DataModules/Datasets.
    +
    +Inheriting subclasses are data-specific implementations that allow the user to
+import and download pre-converted datasets for training of deep-learning-based
    +methods in GraphNeT.
    +"""
    +
    +from typing import Dict, Any, Optional, List, Tuple, Union
    +from abc import abstractmethod
    +import os
    +
    +from .datamodule import GraphNeTDataModule
    +from graphnet.models.graphs import GraphDefinition
    +from graphnet.data.dataset import ParquetDataset, SQLiteDataset
    +
    +
    +
    +[docs] +class CuratedDataset(GraphNeTDataModule): + """Generic base class for curated datasets. + + Curated Datasets in GraphNeT are pre-converted datasets that have been + prepared for training and evaluation of deep learning models. On these + Datasets, graphnet users can train and benchmark their models against SOTA + methods. + """ + + def __init__( + self, + graph_definition: GraphDefinition, + download_dir: str, + truth: Optional[List[str]] = None, + features: Optional[List[str]] = None, + backend: str = "parquet", + train_dataloader_kwargs: Optional[Dict[str, Any]] = None, + validation_dataloader_kwargs: Dict[str, Any] = None, + test_dataloader_kwargs: Dict[str, Any] = None, + ) -> None: + """Construct CuratedDataset. + + Args: + graph_definition: Method that defines the data representation. + download_dir: Directory to download dataset to. + truth (Optional): List of event-level truth to include. Will + include all available information if not given. + features (Optional): List of input features from pulsemap to use. + If not given, all available features will be + used. + backend (Optional): data backend to use. Either "parquet" or + "sqlite". Defaults to "parquet". + train_dataloader_kwargs (Optional): Arguments for the training + DataLoader. Default None. + validation_dataloader_kwargs (Optional): Arguments for the + validation DataLoader, Default None. + test_dataloader_kwargs (Optional): Arguments for the test + DataLoader. Default None. + """ + # From user + self._download_dir = download_dir + self._graph_definition = graph_definition + self._backend = backend.lower() + + # Checks + assert backend.lower() in self.available_backends + assert backend.lower() in ["sqlite", "parquet"] # Double-check + if backend.lower() == "parquet": + dataset_ref = ParquetDataset # type: ignore + elif backend.lower() == "sqlite": + dataset_ref = SQLiteDataset # type: ignore + + # Methods: + features, truth = self._verify_args(features=features, truth=truth) + self.prepare_data() + self._check_properties() + dataset_args, selec, test_selec = self._prepare_args( + backend=backend, features=features, truth=truth + ) + # Instantiate + super().__init__( + dataset_reference=dataset_ref, + dataset_args=dataset_args, + train_dataloader_kwargs=train_dataloader_kwargs, + validation_dataloader_kwargs=validation_dataloader_kwargs, + test_dataloader_kwargs=test_dataloader_kwargs, + selection=selec, + test_selection=test_selec, + ) + +
    +[docs] + @abstractmethod + def prepare_data(self) -> None: + """Download and prepare data."""
+
+
+    @abstractmethod
+    def _prepare_args(
+        self, backend: str, features: List[str], truth: List[str]
+    ) -> Tuple[Dict[str, Any], Union[List[int], None], Union[List[int], None]]:
+        """Prepare arguments to DataModule.
+
+        Args:
+            backend: backend of dataset. Either "parquet" or "sqlite".
+            features: List of features from user to use as input.
+            truth: List of event-level truth from user.
+
+        This method should return three outputs in the following order:
+
+        A) `dataset_args`
+        B) `selection` if wanted, else None
+        C) `test_selection` if wanted, else None.
+
+        See documentation on GraphNeTDataModule for details on these
+        arguments:
+        https://graphnet-team.github.io/graphnet/api/graphnet.data.datamodule.html
+        """
+
+    def _verify_args(
+        self, features: Union[List[str], None], truth: Union[List[str], None]
+    ) -> Tuple[List[str], List[str]]:
+        """Check arguments for truth and features from the user.
+
+        Will check to make sure that the given args are available. If not
+        available, an AssertionError is thrown.
+        """
+        if features is None:
+            features = self._features
+        else:
+            self._assert_isin(given=features, available=self._features)
+        if truth is None:
+            truth = self._event_truth
+        else:
+            self._assert_isin(given=truth, available=self._event_truth)
+
+        return features, truth
+
+    def _assert_isin(self, given: List[str], available: List[str]) -> None:
+        for key in given:
+            assert key in available
+
    +[docs] + def description(self) -> None: + """Print details on the Dataset.""" + event_counts = self.events + print( + "\n", + f"{self.__class__.__name__} contains data from", + f"{self.experiment} and was added to GraphNeT by", + f"{self.creator}.", + "\n\n", + "COMMENTS ON USAGE: \n", + f"{self.creator}: {self.comments} \n", + "\n", + "DATASET DETAILS: \n", + f"pulsemaps: {self.pulsemaps} \n", + f"truth table: {self.truth_table} \n", + f"input features: {self.features}\n", + f"pulse truth: {self.pulse_truth} \n", + f"event truth: {self.event_truth} \n", + f"Number of training events: {event_counts['train']} \n", + f"Number of validation events: {event_counts['val']} \n", + f"Number of test events: {event_counts['test']} \n", + "\n", + "CITATION:\n", + f"{self.citation}", + )
    + + + def _check_properties(self) -> None: + """Check that fields have been filled out.""" + attr = [ + "pulsemaps", + "truth_table", + "event_truth", + "pulse_truth", + "features", + "experiment", + "citation", + "creator", + "available_backends", + ] + for attribute in attr: + assert hasattr(self, "_" + attribute), f"missing {attribute}" + + @property + def pulsemaps(self) -> List[str]: + """Produce a list of available pulsemaps in Dataset.""" + return self._pulsemaps + + @property + def truth_table(self) -> List[str]: + """Produce name of table containing event-level truth in Dataset.""" + return self._truth_table + + @property + def event_truth(self) -> List[str]: + """Produce a list of available event-level truth in Dataset.""" + return self._event_truth + + @property + def pulse_truth(self) -> Union[List[str], None]: + """Produce a list of available pulse-level truth in Dataset.""" + return self._pulse_truth + + @property + def features(self) -> List[str]: + """Produce a list of available input features in Dataset.""" + return self._features + + @property + def experiment(self) -> str: + """Produce the name of the experiment that the data comes from.""" + return self._experiment + + @property + def citation(self) -> str: + """Produce a string that describes how to cite this Dataset.""" + return self._citation + + @property + def comments(self) -> str: + """Produce comments on the dataset from the creator.""" + return self._comments + + @property + def creator(self) -> str: + """Produce name of person who created the Dataset.""" + return self._creator + + @property + def events(self) -> Dict[str, int]: + """Produce a dict that contains number events in each selection.""" + n_train = len(self._train_dataset) + if hasattr(self, "_val_dataset"): + n_val = len(self._val_dataset) + else: + n_val = 0 + if hasattr(self, "_test_dataset"): + n_test = len(self._test_dataset) + else: + n_test = 0 + + return {"train": n_train, "val": n_val, "test": n_test} + + @property + def available_backends(self) -> List[str]: + """Produce a list of available data formats that the data comes in.""" + return self._available_backends + + @property + def dataset_dir(self) -> str: + """Produce path directory that contains dataset files.""" + dataset_dir = os.path.join( + self._download_dir, self.__class__.__name__, self._backend + ) + return dataset_dir
    + + + +
+[docs]
+class ERDAHostedDataset(CuratedDataset):
+    """A base class for datasets/datamodules hosted at ERDA.
+
+    Inheriting subclasses only need to fill out the `_file_hashes`
+    attribute, which points to the file-id of an ERDA-hosted sharelink.
+    It is assumed that each sharelink points to a single file that has
+    been compressed using `tar` and carries the extension ".tar.gz".
+
+    E.g. suppose that the sharelink below
+    https://sid.erda.dk/share_redirect/FbEEzAbg5A
+    points to a compressed sqlite database. Then:
+    _file_hashes = {'sqlite' : "FbEEzAbg5A"}
+    """
+
+    # Member variables
+    _mirror = "https://sid.erda.dk/share_redirect"
+    _file_hashes: Dict[str, str] = {}  # Must be filled out by you!
+
    +[docs] + def prepare_data(self) -> None: + """Prepare the dataset for training.""" + assert self._file_hashes is not None # mypy + file_hash = self._file_hashes[self._backend] + if os.path.exists(self.dataset_dir): + return + else: + # Download, unzip and delete zipped file + os.makedirs(self.dataset_dir) + file_path = os.path.join(self.dataset_dir, file_hash + ".tar.gz") + os.system(f"wget -O {file_path} {self._mirror}/{file_hash}") + os.system(f"tar -xf {file_path} -C {self.dataset_dir}") + os.system(f"rm {file_path}")
    +
    + +
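For orientation, a hypothetical subclass is sketched below. None of it is part of the diff: the share-link id, table and file names, and metadata strings are invented placeholders, while the attribute set mirrors the `_check_properties` list and the `_prepare_args` contract shown above.

    import os
    from typing import Any, Dict, List, Tuple, Union

    class ToyERDADataset(ERDAHostedDataset):
        # Placeholder metadata; `_check_properties` asserts these exist.
        _pulsemaps = ["pulse_table"]
        _truth_table = "truth"
        _event_truth = ["energy", "zenith", "azimuth"]
        _pulse_truth = None
        _features = ["sipm_x", "sipm_y", "sipm_z", "t"]
        _experiment = "ToyExperiment"
        _citation = "Please cite ..."
        _creator = "A. N. Author"
        _comments = "Toy example for illustration only."
        _available_backends = ["sqlite"]
        _file_hashes = {"sqlite": "AbCdEfGh12"}  # hypothetical share-link id

        def _prepare_args(
            self, backend: str, features: List[str], truth: List[str]
        ) -> Tuple[Dict[str, Any], Union[List[int], None], Union[List[int], None]]:
            # Returns A) dataset_args, B) selection, C) test_selection.
            dataset_args = {
                "path": os.path.join(self.dataset_dir, "merged.db"),  # placeholder
                "graph_definition": self._graph_definition,
                "pulsemaps": self._pulsemaps,
                "truth_table": self._truth_table,
                "features": features,
                "truth": truth,
            }
            return dataset_args, None, None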
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/data/dataclasses.html b/_modules/graphnet/data/dataclasses.html index a93a3e455..c4ed82038 100644 --- a/_modules/graphnet/data/dataclasses.html +++ b/_modules/graphnet/data/dataclasses.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -372,7 +399,7 @@

    Source code for graphnet.d Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/dataconverter.html b/_modules/graphnet/data/dataconverter.html index 5ccbe3ea5..f2911d05e 100644 --- a/_modules/graphnet/data/dataconverter.html +++ b/_modules/graphnet/data/dataconverter.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -333,7 +360,6 @@

    Source code for graphnet from multiprocessing.sharedctypes import Synchronized import pandas as pd import os -from glob import glob from graphnet.utilities.decorators import final @@ -342,6 +368,10 @@

    Source code for graphnet from .writers.graphnet_writer import GraphNeTWriter from .extractors import Extractor from .extractors.icecube import I3Extractor +from .extractors.liquido import H5Extractor +from .extractors.internal import ParquetExtractor +from .extractors.prometheus import PrometheusExtractor + from .dataclasses import I3FileSet @@ -369,7 +399,13 @@

    Source code for graphnet file_reader: GraphNeTFileReader, save_method: GraphNeTWriter, outdir: str, - extractors: Union[List[Extractor], List[I3Extractor]], + extractors: Union[ + List[Extractor], + List[I3Extractor], + List[ParquetExtractor], + List[H5Extractor], + List[PrometheusExtractor], + ], index_column: str = "event_no", num_workers: int = 1, ) -> None: @@ -387,6 +423,9 @@

    Source code for graphnet num_workers: The number of CPUs used for parallel processing. Defaults to 1 (no multiprocessing). """ + # Base class constructor + super().__init__(name=__name__, class_name=self.__class__.__name__) + # Member Variable Assignment self._file_reader = file_reader self._save_method = save_method @@ -400,10 +439,8 @@

    Source code for graphnet # with reader. if not isinstance(extractors, list): extractors = [extractors] - self._file_reader.set_extractors(extractors=extractors) - # Base class constructor - super().__init__(name=__name__, class_name=self.__class__.__name__) + self._file_reader.set_extractors(extractors=extractors) @final def __call__(self, input_dir: Union[str, List[str]]) -> None: @@ -447,10 +484,9 @@

    Source code for graphnet # Iterate over files for _ in map_fn( self._process_file, - tqdm(input_files, unit="file(s)", colour="green"), + tqdm(input_files, unit=" file(s)", colour="green"), ): self.debug("processing file.") - self._update_shared_variables(pool) @final @@ -463,13 +499,27 @@

Source code for graphnet        This function is called in parallel.
        """
        # Read and apply extractors
-        data: List[OrderedDict] = self._file_reader(file_path=file_path)
-
-        # Count number of events
-        n_events = len(data)
-
-        # Assign event_no's to each event in data and transform to pd.DataFrame
-        dataframes = self._assign_event_no(data=data)
+        data = self._file_reader(file_path=file_path)
+
+        # Readers may return a list of event dicts or a dict of tables
+        if isinstance(data, list):
+            # Assign event_no's to each event in data
+            # and transform to pd.DataFrame
+            n_events = len(data)
+            dataframes = self._assign_event_no(data=data)
+        elif isinstance(data, dict):
+            keys = [key for key in data.keys()]
+            counter = []
+            for key in keys:
+                assert isinstance(data[key], pd.DataFrame)
+                assert self._index_column in data[key].columns
+                counter.append(len(data[key][self._index_column]))
+            dataframes = data
+            n_events = len(
+                pd.unique(data[keys[np.argmin(counter)]][self._index_column])
+            )
+        else:
+            raise TypeError(
+                f"Unsupported output type from file reader: {type(data)}"
+            )

        # Delete `data` to save memory
        del data
@@ -539,7 +589,6 @@

    Source code for graphnet ) -> int: """Count number of rows that features from `extractor_name` have.""" extractor_dict = event_dict[extractor_name] - try: # If all features in extractor_name have the same length # this line of code will execute without error and result @@ -632,7 +681,9 @@

    Source code for graphnet
    [docs] @final - def merge_files(self, files: Optional[List[str]] = None) -> None: + def merge_files( + self, files: Optional[List[str]] = None, **kwargs: Any + ) -> None: """Merge converted files. `DataConverter` will call the `.merge_files` method in the @@ -660,8 +711,7 @@

    Source code for graphnet merge_path = os.path.join(self._output_dir, "merged") self.info(f"Merging files to {merge_path}") self._save_method.merge_files( - files=files_to_merge, - output_dir=merge_path, + files=files_to_merge, output_dir=merge_path, **kwargs )
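A hedged end-to-end sketch of the converter and `merge_files` shown above. The class names follow the module files listed in this diff (`I3Reader`, `SQLiteWriter`, `I3FeatureExtractorIceCube86`), but the paths, the GCD-rescue file, and the pulsemap name are placeholders, and constructor details may differ between versions:

    from graphnet.data.dataconverter import DataConverter
    from graphnet.data.readers import I3Reader
    from graphnet.data.writers import SQLiteWriter
    from graphnet.data.extractors.icecube import I3FeatureExtractorIceCube86

    converter = DataConverter(
        file_reader=I3Reader(gcd_rescue="/path/to/default_gcd.i3.gz"),  # placeholder
        save_method=SQLiteWriter(),
        outdir="/tmp/converted",  # placeholder
        extractors=[I3FeatureExtractorIceCube86("SRTInIcePulses")],
        num_workers=2,
    )
    converter("/path/to/i3_files")  # convert every file found by the reader
    converter.merge_files()         # merge outputs into "<outdir>/merged"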

    @@ -689,7 +739,7 @@

    Source code for graphnet Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/dataloader.html b/_modules/graphnet/data/dataloader.html index 6aa911562..c39bee23e 100644 --- a/_modules/graphnet/data/dataloader.html +++ b/_modules/graphnet/data/dataloader.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -363,7 +390,7 @@

    Source code for graphnet.da dataset: Dataset, batch_size: int = 1, shuffle: bool = False, - num_workers: int = 10, + num_workers: int = 1, persistent_workers: bool = True, collate_fn: Callable = collate_fn, prefetch_factor: int = 2, @@ -443,7 +470,7 @@
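The hunk above lowers the default `num_workers` from 10 to 1. A minimal construction sketch against the signature shown, with illustrative values (`dataset` is assumed to be an existing graphnet `Dataset` instance):

    from graphnet.data.dataloader import DataLoader

    loader = DataLoader(
        dataset,        # any graphnet Dataset
        batch_size=32,
        shuffle=True,
        num_workers=4,  # override the new default of 1
    )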

    Source code for graphnet.da Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/datamodule.html b/_modules/graphnet/data/datamodule.html index 9813c0db0..b8e4f9c09 100644 --- a/_modules/graphnet/data/datamodule.html +++ b/_modules/graphnet/data/datamodule.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -323,9 +350,8 @@

    Source code for graphnet.data.datamodule

     """Base `Dataloader` class(es) used in `graphnet`."""
    -from typing import Dict, Any, Optional, List, Tuple, Union
    +from typing import Dict, Any, Optional, List, Tuple, Union, Type
     import pytorch_lightning as pl
    -from torch.utils.data import DataLoader
     from copy import deepcopy
     from sklearn.model_selection import train_test_split
     import pandas as pd
    @@ -337,6 +363,7 @@ 

    Source code for graphnet.da ParquetDataset, ) from graphnet.utilities.logging import Logger +from graphnet.data.dataloader import DataLoader
    @@ -346,13 +373,15 @@

    Source code for graphnet.da def __init__( self, - dataset_reference: Union[SQLiteDataset, ParquetDataset, Dataset], + dataset_reference: Union[ + Type[SQLiteDataset], Type[ParquetDataset], Type[Dataset] + ], dataset_args: Dict[str, Any], selection: Optional[Union[List[int], List[List[int]]]] = None, test_selection: Optional[Union[List[int], List[List[int]]]] = None, - train_dataloader_kwargs: Optional[Dict[str, Any]] = None, - validation_dataloader_kwargs: Optional[Dict[str, Any]] = None, - test_dataloader_kwargs: Optional[Dict[str, Any]] = None, + train_dataloader_kwargs: Dict[str, Any] = None, + validation_dataloader_kwargs: Dict[str, Any] = None, + test_dataloader_kwargs: Dict[str, Any] = None, train_val_split: Optional[List[float]] = [0.9, 0.10], split_seed: int = 42, ) -> None: @@ -366,13 +395,14 @@

Source code for graphnet.da            selection: (Optional) a list of event id's used for training
                and validation, Default None.
            test_selection: (Optional) a list of event id's used for testing,
-                Default None.
+                Defaults to None.
            train_dataloader_kwargs: Arguments for the training DataLoader,
-                Default None.
+                Defaults to {"batch_size": 2, "num_workers": 1}.
            validation_dataloader_kwargs: Arguments for the validation
-                DataLoader, Default None.
+                DataLoader. Defaults to
+                `train_dataloader_kwargs`.
            test_dataloader_kwargs: Arguments for the test DataLoader,
-                Default None.
+                Defaults to `train_dataloader_kwargs`.
            train_val_split (Optional): Split ratio for training and
                validation sets. Default is [0.9, 0.10].
            split_seed: seed used for shuffling and splitting selections into
@@ -387,17 +417,101 @@

    Source code for graphnet.da self._train_val_split = train_val_split or [0.0] self._rng = split_seed - self._train_dataloader_kwargs = train_dataloader_kwargs or {} - self._validation_dataloader_kwargs = validation_dataloader_kwargs or {} - self._test_dataloader_kwargs = test_dataloader_kwargs or {} + if train_dataloader_kwargs is None: + train_dataloader_kwargs = {"batch_size": 2, "num_workers": 1} + + self._set_dataloader_kwargs( + train_dataloader_kwargs, + validation_dataloader_kwargs, + test_dataloader_kwargs, + ) # If multiple dataset paths are given, we should use EnsembleDataset self._use_ensemble_dataset = isinstance( self._dataset_args["path"], list ) + # Create Dataloaders self.setup("fit") + def _set_dataloader_kwargs( + self, + train_dl_args: Dict[str, Any], + val_dl_args: Union[Dict[str, Any], None], + test_dl_args: Union[Dict[str, Any], None], + ) -> None: + """Copy train dataloader args to other dataloaders if not given. + + Also checks that ParquetDataset dataloaders have multiprocessing + context set to "spawn" as this is strictly required. + + See: https://docs.pola.rs/user-guide/misc/multiprocessing/ + """ + if val_dl_args is None: + self.info( + "No `val_dataloader_kwargs` given. This arg has " + "been set to `train_dataloader_kwargs` with `shuffle` = False." + ) + val_dl_args = deepcopy(train_dl_args) + val_dl_args["shuffle"] = False # Important for inference + if (test_dl_args is None) & (self._test_selection is not None): + test_dl_args = deepcopy(train_dl_args) + test_dl_args["shuffle"] = False # Important for inference + self.info( + "No `test_dataloader_kwargs` given. This arg has " + "been set to `train_dataloader_kwargs` with `shuffle` = False." + ) + + if self._dataset == ParquetDataset: + train_dl_args = self._add_context(train_dl_args, "training") + val_dl_args = self._add_context(val_dl_args, "validation") + if self._test_selection is not None: + assert test_dl_args is not None + test_dl_args = self._add_context(test_dl_args, "test") + + self._train_dataloader_kwargs = train_dl_args + self._validation_dataloader_kwargs = val_dl_args + self._test_dataloader_kwargs = test_dl_args or {} + + def _add_context( + self, dataloader_args: Dict[str, Any], dataloader_type: str + ) -> Dict[str, Any]: + """Handle assignment of `multiprocessing_context` arg to loaders. + + Datasets relying on threaded libraries often require the + multiprocessing context to be set to "spawn" if "num_workers" > 0. This + method will check the arguments for this entry and throw an error if + the field is already assigned to a wrong value. If the value is not + specified, it is added automatically with a log entry. + """ + arg = "multiprocessing_context" + if dataloader_args["num_workers"] != 0: + # If using multiprocessing + if arg in dataloader_args: + if dataloader_args[arg] != "spawn": + # Wrongly assigned by user + self.error( + "DataLoaders using `ParquetDataset` must have " + "multiprocessing_context = 'spawn'. " + f" Found '{dataloader_args[arg]}' in ", + f"{dataloader_type} dataloader.", + ) + raise ValueError("multiprocessing_context must be 'spawn'") + else: + # Correctly assigned by user + return dataloader_args + else: + # Forgotten assignment by user + dataloader_args[arg] = "spawn" + self.warning_once( + f"{self.__class__.__name__} has automatically " + "set multiprocessing_context = 'spawn' in " + f"{dataloader_type} dataloader." + ) + return dataloader_args + else: + return dataloader_args +
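Given `_set_dataloader_kwargs` and `_add_context` above, dataloader kwargs for a `ParquetDataset`-backed module would look like this sketch (values illustrative):

    # "spawn" is required whenever num_workers > 0 with ParquetDataset;
    # if omitted, _add_context() inserts it and logs a warning.
    train_dataloader_kwargs = {
        "batch_size": 16,
        "num_workers": 4,
        "multiprocessing_context": "spawn",
        "shuffle": True,
    }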
    [docs] def prepare_data(self) -> None: @@ -654,6 +768,7 @@

    Source code for graphnet.da assert isinstance(selection, (int, list)) if isinstance(selection, int): flat_selection = [selection] + elif isinstance(selection[0], list): flat_selection = [ item @@ -814,7 +929,7 @@

    Source code for graphnet.da

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/dataset/dataset.html b/_modules/graphnet/data/dataset/dataset.html index 84a3f8d92..d93f07d8f 100644 --- a/_modules/graphnet/data/dataset/dataset.html +++ b/_modules/graphnet/data/dataset/dataset.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -353,7 +380,7 @@

    Source code for graphn DatasetConfig, DatasetConfigSaverABCMeta, ) -from graphnet.utilities.config.parsing import traverse_and_apply +from graphnet.exceptions.exceptions import ColumnMissingException from graphnet.utilities.logging import Logger from graphnet.models.graphs import GraphDefinition @@ -362,13 +389,6 @@

    Source code for graphn ) -
    -[docs] -class ColumnMissingException(Exception): - """Exception to indicate a missing column in a dataset."""
    - - -
    [docs] def load_module(class_name: str) -> Type: @@ -637,13 +657,14 @@

    Source code for graphn self._path = path self._selection = None self._pulsemaps = pulsemaps - self._features = [index_column] + features + self._features = features self._truth = [index_column] + truth self._index_column = index_column self._truth_table = truth_table self._loss_weight_default_value = loss_weight_default_value self._graph_definition = deepcopy(graph_definition) self._labels = labels + self._string_column = graph_definition._detector.string_index_name if node_truth is not None: assert isinstance(node_truth_table, str) @@ -668,7 +689,10 @@

Source code for graphn        self._selection = None
        if self._string_selection:
-            self._selection = f"string in {str(tuple(self._string_selection))}"
+            # Broken into multiple lines for length
+            col = self._string_column
+            condition = str(tuple(self._string_selection))
+            self._selection = f"{col} in {condition}"

        self._loss_weight_column = loss_weight_column
        self._loss_weight_table = loss_weight_table
@@ -711,10 +735,6 @@

    Source code for graphn else: self._indices = selection - # Purely internal member variables - self._missing_variables: Dict[str, List[str]] = {} - self._remove_missing_columns() - # Implementation-specific post-init code. self._post_init() @@ -742,9 +762,7 @@

    Source code for graphn """Return a list of all unique values in `self._index_column`.""" @abstractmethod - def _get_event_index( - self, sequential_index: Optional[int] - ) -> Optional[int]: + def _get_event_index(self, sequential_index: int) -> int: """Return the event index corresponding to a `sequential_index`."""
    @@ -756,7 +774,7 @@

    Source code for graphn columns: Union[List[str], str], sequential_index: Optional[int] = None, selection: Optional[str] = None, - ) -> List[Tuple[Any, ...]]: + ) -> np.ndarray: """Query a table at a specific index, optionally with some selection. Args: @@ -878,7 +896,9 @@

    Source code for graphn """Return a list missing columns in `table`.""" for column in columns: try: - self.query_table(table, [column], 0) + self.query_table( + table=table, columns=[column], sequential_index=0 + ) except ColumnMissingException: if table not in self._missing_variables: self._missing_variables[table] = [] @@ -890,12 +910,7 @@

    Source code for graphn def _query( self, sequential_index: int - ) -> Tuple[ - List[Tuple[float, ...]], - Tuple[Any, ...], - Optional[List[Tuple[Any, ...]]], - Optional[float], - ]: + ) -> Tuple[np.ndarray, np.ndarray, Optional[np.ndarray], Optional[float]]: """Query file for event features and truth information. The returned lists have lengths corresponding to the number of pulses @@ -917,11 +932,14 @@

    Source code for graphn features_pulsemap = self.query_table( pulsemap, self._features, sequential_index, self._selection ) - features.extend(features_pulsemap) + features.append(features_pulsemap) + + if len(self._pulsemaps) > 0: + features = np.concatenate(features, axis=0) - truth: Tuple[Any, ...] = self.query_table( + truth = self.query_table( self._truth_table, self._truth, sequential_index - )[0] + ) if self._node_truth: assert self._node_truth_table is not None node_truth = self.query_table( @@ -933,26 +951,22 @@

    Source code for graphn else: node_truth = None - loss_weight: Optional[float] = None # Default if self._loss_weight_column is not None: assert self._loss_weight_table is not None - loss_weight_list = self.query_table( + loss_weight = self.query_table( self._loss_weight_table, self._loss_weight_column, sequential_index, ) - if len(loss_weight_list): - loss_weight = loss_weight_list[0][0] - else: - loss_weight = -1.0 - + else: + loss_weight = None return features, truth, node_truth, loss_weight def _create_graph( self, - features: List[Tuple[float, ...]], - truth: Tuple[Any, ...], - node_truth: Optional[List[Tuple[Any, ...]]] = None, + features: np.ndarray, + truth: np.ndarray, + node_truth: Optional[np.ndarray] = None, loss_weight: Optional[float] = None, ) -> Data: """Create Pytorch Data (i.e. graph) object. @@ -967,9 +981,11 @@

    Source code for graphn Returns: Graph object. """ - # Convert nested list to simple dict + # Convert truth to dict + if len(truth.shape) == 1: + truth = truth.reshape(1, -1) truth_dict = { - key: truth[index] for index, key in enumerate(self._truth) + key: truth[:, index] for index, key in enumerate(self._truth) } # Define custom labels @@ -977,10 +993,9 @@

    Source code for graphn # Convert nested list to simple dict if node_truth is not None: - node_truth_array = np.asarray(node_truth) assert self._node_truth is not None node_truth_dict = { - key: node_truth_array[:, index] + key: node_truth[:, index] for index, key in enumerate(self._node_truth) } @@ -991,19 +1006,16 @@

    Source code for graphn # Catch cases with no reconstructed pulses if len(features): - node_features = np.asarray(features)[ - :, 1: - ] # first entry is index column + node_features = features else: - node_features = np.array([]).reshape((0, len(self._features) - 1)) + node_features = np.array([]).reshape((0, len(self._features))) + assert isinstance(features, np.ndarray) # Construct graph data object assert self._graph_definition is not None graph = self._graph_definition( input_features=node_features, - input_feature_names=self._features[ - 1: - ], # first entry is index column + input_feature_names=self._features, truth_dicts=truth_dicts, custom_label_functions=self._label_fns, loss_weight_column=self._loss_weight_column, @@ -1017,13 +1029,11 @@

    Source code for graphn """Return dictionary of labels, to be added as graph attributes.""" if "pid" in truth_dict.keys(): abs_pid = abs(truth_dict["pid"]) - sim_type = truth_dict["sim_type"] labels_dict = { self._index_column: truth_dict[self._index_column], "muon": int(abs_pid == 13), "muon_stopped": int(truth_dict.get("stopped_muon") == 1), - "noise": int((abs_pid == 1) & (sim_type != "data")), "neutrino": int( (abs_pid != 13) & (abs_pid != 1) ), # @TODO: `abs_pid in [12,14,16]`? @@ -1031,7 +1041,7 @@

    Source code for graphn "v_u": int(abs_pid == 14), "v_t": int(abs_pid == 16), "track": int( - (abs_pid == 14) & (truth_dict["interaction_type"] == 1) + (abs_pid == 14) & (truth_dict.get("interaction_type") == 1) ), "dbang": self._get_dbang_label(truth_dict), "corsika": int(abs_pid > 20), @@ -1099,7 +1109,7 @@

    Source code for graphn

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/dataset/parquet/parquet_dataset.html b/_modules/graphnet/data/dataset/parquet/parquet_dataset.html index 10827b1ba..057bdf4bf 100644 --- a/_modules/graphnet/data/dataset/parquet/parquet_dataset.html +++ b/_modules/graphnet/data/dataset/parquet/parquet_dataset.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -322,145 +349,396 @@

    Source code for graphnet.data.dataset.parquet.parquet_dataset

    -"""`Dataset` class(es) for reading from Parquet files."""
    -
    -from typing import Any, Dict, List, Optional, Tuple, Union, cast
    +"""Base :py:class:`Dataset` class(es) used in GraphNeT."""
    +
    +from copy import deepcopy
    +from abc import ABC, abstractmethod
    +from typing import (
    +    cast,
    +    Any,
    +    Callable,
    +    Dict,
    +    List,
    +    Optional,
    +    Tuple,
    +    Union,
    +    Iterable,
    +    Type,
    +)
     
     import numpy as np
    -import awkward as ak
    +import torch
    +import os
    +from torch_geometric.data import Data
    +import polars as pol
    +from polars.series.series import Series
    +from polars.exceptions import InvalidOperationError
    +from glob import glob
    +from bisect import bisect_right
    +from collections import OrderedDict
     
    -from graphnet.data.dataset.dataset import Dataset, ColumnMissingException
    +from graphnet.models.graphs import GraphDefinition
    +from graphnet.data.dataset import Dataset
    +from graphnet.exceptions.exceptions import ColumnMissingException
     
     
     
[docs]
class ParquetDataset(Dataset):
-    """Pytorch dataset for reading from Parquet files."""
+    """Dataset class for Parquet-files converted with `ParquetWriter`."""

-    # Implementing abstract method(s)
-    def _init(self) -> None:
-        # Check(s)
-        if not isinstance(self._path, list):
-            assert isinstance(self._path, str)
-            assert self._path.endswith(
-                ".parquet"
-            ), f"Format of input file `{self._path}` is not supported"
-
-        assert (
-            self._node_truth is None
-        ), "Argument `node_truth` is currently not supported."
-        assert (
-            self._node_truth_table is None
-        ), "Argument `node_truth_table` is currently not supported."
-        assert (
-            self._string_selection is None
-        ), "Argument `string_selection` is currently not supported"
-
-        # Set custom member variable(s)
-        if not isinstance(self._path, list):
-            self._parquet_hook = ak.from_parquet(self._path, lazy=False)
-        else:
-            self._parquet_hook = ak.concatenate(
-                ak.from_parquet(file) for file in self._path
-            )
+    def __init__(
+        self,
+        path: str,
+        graph_definition: GraphDefinition,
+        pulsemaps: Union[str, List[str]],
+        features: List[str],
+        truth: List[str],
+        *,
+        node_truth: Optional[List[str]] = None,
+        index_column: str = "event_no",
+        truth_table: str = "truth",
+        node_truth_table: Optional[str] = None,
+        string_selection: Optional[List[int]] = None,
+        selection: Optional[Union[str, List[int], List[List[int]]]] = None,
+        dtype: torch.dtype = torch.float32,
+        loss_weight_table: Optional[str] = None,
+        loss_weight_column: Optional[str] = None,
+        loss_weight_default_value: Optional[float] = None,
+        seed: Optional[int] = None,
+        cache_size: int = 1,
+    ):
+        """Construct Dataset.
+
+        NOTE: DataLoaders using this Dataset should have
+        "multiprocessing_context = 'spawn'" set to avoid thread locking.
+
+        Args:
+            path: Path to the file(s) from which this `Dataset` should read.
+            pulsemaps: Name(s) of the pulse map series that should be used to
+                construct the nodes on the individual graph objects, and their
+                features. Multiple pulse series maps can be used, e.g., when
+                different DOM types are stored in different maps.
+            features: List of columns in the input files that should be used
+                as node features on the graph objects.
+            truth: List of event-level columns in the input files that should
+                be added as attributes on the graph objects.
+            node_truth: List of node-level columns in the input files that
+                should be added as attributes on the graph objects.
+            index_column: Name of the column in the input files that contains
+                unique indices to identify and map events across tables.
+            truth_table: Name of the table containing event-level truth
+                information.
+            node_truth_table: Name of the table containing node-level truth
+                information.
+            string_selection: Subset of strings for which data should be read
+                and used to construct graph objects. Defaults to None, meaning
+                all strings for which data exists are used.
+            selection: The batch ids to include in the dataset.
+                Defaults to None, meaning that all batches are read.
+            dtype: Type of the feature tensor on the graph objects returned.
+            loss_weight_table: Name of the table containing per-event loss
+                weights.
+            loss_weight_column: Name of the column in `loss_weight_table`
+                containing per-event loss weights. This is also the name of
+                the corresponding attribute assigned to the graph object.
+            loss_weight_default_value: Default per-event loss weight.
+                NOTE: This default value is only applied when
+                `loss_weight_table` and `loss_weight_column` are specified,
+                and in this case to events with no value in the corresponding
+                table/column. That is, if no per-event loss weight
+                table/column is provided, this value is ignored. Defaults to
+                None.
+            seed: Random number generator seed, used for selecting a random
+                subset of events when resolving a string-based selection
+                (e.g., `"10000 random events ~ event_no % 5 > 0"` or
+                `"20% random events ~ event_no % 5 > 0"`).
+            graph_definition: Method that defines the graph representation.
+            cache_size: Number of batches to cache in memory.
+                Must be at least 1. Defaults to 1.
+        """
+        self._validate_selection(selection)
+        # Base class constructor
+        super().__init__(
+            path=path,
+            pulsemaps=pulsemaps,
+            features=features,
+            truth=truth,
+            node_truth=node_truth,
+            index_column=index_column,
+            truth_table=truth_table,
+            node_truth_table=node_truth_table,
+            string_selection=string_selection,
+            selection=selection,
+            dtype=dtype,
+            loss_weight_table=loss_weight_table,
+            loss_weight_column=loss_weight_column,
+            loss_weight_default_value=loss_weight_default_value,
+            seed=seed,
+            graph_definition=graph_definition,
+        )

-    def _get_all_indices(self) -> List[int]:
-        return np.arange(
-            len(
-                ak.to_numpy(
-                    self._parquet_hook[self._truth_table][self._index_column]
-                ).tolist()
-            )
-        ).tolist()
+        # mypy..
+        assert isinstance(self._path, str)
+        self._path: str = self._path
+        # Member Variables
+        self._cache_size = cache_size
+        self._batch_sizes = self._calculate_sizes()
+        self._batch_cumsum = np.cumsum(self._batch_sizes)
+        self._file_cache = self._initialize_file_cache(
+            truth_table=truth_table,
+            node_truth_table=node_truth_table,
+            pulsemaps=pulsemaps,
+        )
+        self._string_selection = string_selection
+        # Purely internal member variables
+        self._missing_variables: Dict[str, List[str]] = {}
+        self._remove_missing_columns()

-    def _get_event_index(
-        self, sequential_index: Optional[int]
-    ) -> Optional[int]:
-        index: Optional[int]
-        if sequential_index is None:
-            index = None
-        else:
-            index = cast(List[int], self._indices)[sequential_index]
-
-        return index
-
-    def _format_dictionary_result(
-        self, dictionary: Dict
-    ) -> List[Tuple[Any, ...]]:
-        """Convert the output of `ak.to_list()` into a list of tuples."""
-        # All scalar values
-        if all(map(np.isscalar, dictionary.values())):
-            return [tuple(dictionary.values())]
-
-        # All arrays should have same length
-        array_lengths = [
-            len(values)
-            for values in dictionary.values()
-            if not np.isscalar(values)
-        ]
-        assert len(set(array_lengths)) == 1, (
-            f"Arrays in {dictionary} have differing lengths "
-            f"({set(array_lengths)})."
+    def _initialize_file_cache(
+        self,
+        truth_table: str,
+        node_truth_table: Optional[str],
+        pulsemaps: Union[str, List[str]],
+    ) -> Dict[str, OrderedDict]:
+        tables = [truth_table]
+        if node_truth_table is not None:
+            tables.append(node_truth_table)
+        if isinstance(pulsemaps, str):
+            tables.append(pulsemaps)
+        elif isinstance(pulsemaps, list):
+            tables.extend(pulsemaps)
+
+        cache: Dict[str, OrderedDict] = {}
+        for table in tables:
+            cache[table] = OrderedDict()
+        return cache
+
+    def _validate_selection(
+        self,
+        selection: Optional[Union[str, List[int], List[List[int]]]] = None,
+    ) -> None:
+        if selection is not None:
+            try:
+                assert not isinstance(selection, str)
+            except AssertionError:
+                e = AssertionError(
+                    f"{self.__class__.__name__} does not support "
+                    "str-selections."
+                )
+                raise e
+
+    def _init(self) -> None:
+        return
+
+    def _get_event_index(self, sequential_index: int) -> int:
+        event_index = self.query_table(
+            table=self._truth_table,
+            sequential_index=sequential_index,
+            columns=[self._index_column],
        )
-        nb_elements = array_lengths[0]
+        return event_index

-        # Broadcast scalars
-        for key in dictionary:
-            value = dictionary[key]
-            if np.isscalar(value):
-                dictionary[key] = np.repeat(
-                    value, repeats=nb_elements
-                ).tolist()
+    def __len__(self) -> int:
+        """Return length of dataset, i.e. number of training examples."""
+        return sum(self._batch_sizes)

-        return list(map(tuple, list(zip(*dictionary.values()))))
+    def _get_all_indices(self) -> List[int]:
+        """Return a list of all unique values in `self._index_column`."""
+        files = glob(os.path.join(self._path, self._truth_table, "*.parquet"))
+        return np.arange(0, len(files), 1).tolist()
+
+    def _calculate_sizes(self) -> List[int]:
+        """Calculate the number of events in each batch."""
+        sizes = []
+        for batch_id in self._indices:
+            path = os.path.join(
+                self._path,
+                self._truth_table,
+                f"{self._truth_table}_{batch_id}.parquet",
+            )
+            sizes.append(len(pol.read_parquet(path)))
+        return sizes
+
+    def _get_row_idx(self, sequential_index: int) -> int:
+        """Return the row index corresponding to a `sequential_index`."""
+        file_idx = bisect_right(self._batch_cumsum, sequential_index)
+        if file_idx > 0:
+            idx = int(sequential_index - self._batch_cumsum[file_idx - 1])
+        else:
+            idx = sequential_index
+        return idx
[docs]
-    def query_table(
+    def query_table(  # type: ignore
         self,
         table: str,
         columns: Union[List[str], str],
         sequential_index: Optional[int] = None,
         selection: Optional[str] = None,
-    ) -> List[Tuple[Any, ...]]:
-        """Query table at a specific index, optionally with some selection."""
-        # Check(s)
-        assert (
-            selection is None
-        ), "Argument `selection` is currently not supported"
-
-        index = self._get_event_index(sequential_index)
-
-        try:
-            if index is None:
-                ak_array = self._parquet_hook[table][columns][:]
-            else:
-                ak_array = self._parquet_hook[table][columns][index]
-        except ValueError as e:
-            if "does not exist (not in record)" in str(e):
-                raise ColumnMissingException(str(e))
-            else:
-                raise e
+    ) -> np.ndarray:
+        """Query a table at a specific index, optionally with some selection.
+
+        Args:
+            table: Table to be queried.
+            columns: Columns to read out.
+            sequential_index: Sequentially numbered index
+                (i.e. in [0,len(self))) of the event to query. This _may_
+                differ from the indexation used in `self._indices`. If no
+                value is provided, the entire column is returned.
+            selection: Selection to be imposed before reading out data.
+                Defaults to None.
+
+        Returns:
+            Numpy array containing the values in `columns`. If the `table`
+            contains only scalar data for `columns`, an array of length 1 is
+            returned.
+
+        Raises:
+            ColumnMissingException: If one or more element in `columns` is
+                not present in `table`.
+        """
+        if isinstance(columns, str):
+            columns = [columns]

-        output = ak_array.to_list()
+        if sequential_index is None:
+            file_idx = np.arange(0, len(self._batch_cumsum), 1)
+        else:
+            file_idx = [bisect_right(self._batch_cumsum, sequential_index)]
+
+        file_indices = [self._indices[idx] for idx in file_idx]
+
+        arrays = []
+        for file_idx in file_indices:
+            array = self._query_table(
+                table=table,
+                columns=columns,
+                file_idx=file_idx,
+                sequential_index=sequential_index,
+                selection=selection,
+            )
+            arrays.append(array)
+        return np.concatenate(arrays, axis=0)

-        result: List[Tuple[Any, ...]] = []
-        # Querying single index
-        if isinstance(output, dict):
-            assert list(output.keys()) == columns
-            result = self._format_dictionary_result(output)
+    def _query_table(
+        self,
+        table: str,
+        columns: Union[List[str], str],
+        file_idx: int,
+        sequential_index: Optional[int] = None,
+        selection: Optional[str] = None,
+    ) -> np.ndarray:

-        # Querying entire columm
-        elif isinstance(output, list):
-            for dictionary in output:
-                assert list(dictionary.keys()) == columns
-                result.extend(self._format_dictionary_result(dictionary))
+        self._load_table(table_name=table, file_idx=file_idx)
+        df = self._file_cache[table][file_idx]
+        if sequential_index is not None:
+            row_id = self._get_row_idx(sequential_index)
+        else:
+            row_id = np.arange(0, len(df), 1)
+        df = df[row_id]
+        if len(df) > 0:
+            self._raise_column_exception(
+                df_columns=df.columns, columns=columns, table=table
+            )
+            data = df.select(columns)
+            if isinstance(data[columns[0]][0], Series):
+                x = [data[col][0].to_numpy().reshape(-1, 1) for col in columns]
+                array = np.concatenate(x, axis=1)
+            else:
+                array = data.to_numpy()
+        else:
+            array = np.array([])
+        return array
+
+    def _load_table(self, table_name: str, file_idx: int) -> None:
+        """Load and possibly cache a parquet table."""
+        if file_idx not in self._file_cache[table_name].keys():
+            file_path = os.path.join(
+                self._path, table_name, f"{table_name}_{file_idx}.parquet"
+            )
+            df = pol.read_parquet(file_path).sort(self._index_column)
+            if (table_name in self._pulsemaps) or (
+                table_name == self._node_truth_table
+            ):
+                if table_name == self._node_truth_table:
+                    pol_columns = [pol.col(self._node_truth)]
+                else:
+                    pol_columns = [pol.col(feat) for feat in self._features]
+
+                if self._string_selection:
+                    pol_columns.append(pol.col(self._string_column))
+
+                df = df.group_by(self._index_column).agg(pol_columns)
+
+            self._file_cache[table_name][file_idx] = df.sort(
+                self._index_column
+            )
+            n_files_cached: int = len(self._file_cache[table_name])
+            if n_files_cached > self._cache_size:
+                del self._file_cache[table_name][
+                    list(self._file_cache[table_name].keys())[0]
+                ]
+
+    def _raise_column_exception(
+        self, df_columns: List[str], columns: Union[List[str], str], table: str
+    ) -> None:
+        if isinstance(columns, str):
+            columns = [columns]
+        for column in columns:
+            if column not in df_columns:
+                raise ColumnMissingException(f"{column} not in {table}")
+
+    def __getitem__(self, sequential_index: int) -> Data:
+        """Return graph `Data` object at `index`."""
+        if not (0 <= sequential_index < len(self)):
+            raise IndexError(
+                f"Index {sequential_index} not in range [0, {len(self) - 1}]"
+            )
+        if self._node_truth_table is not None:
+            assert isinstance(self._node_truth, (list, str))  # mypy..
+            node_truth = self.query_table(
+                table=self._node_truth_table,
+                columns=self._node_truth,
+                sequential_index=sequential_index,
+            )
+        else:
+            node_truth = None
+
+        if self._loss_weight_table is not None:
+            assert isinstance(self._loss_weight_column, str)
+            loss_weight = self.query_table(
+                table=self._loss_weight_table,
+                columns=self._loss_weight_column,
+                sequential_index=sequential_index,
+            )
+        else:
+            loss_weight = None
+
+        features = []
+        for pulsemap in self._pulsemaps:
+            features.append(
+                self.query_table(
+                    table=pulsemap,
+                    columns=self._features,
+                    sequential_index=sequential_index,
+                )
+            )
+        features = np.concatenate(features, axis=0)

-        return result
-
+        truth = self.query_table(
+            table=self._truth_table,
+            columns=self._truth,
+            sequential_index=sequential_index,
+        )
+
+        graph = self._create_graph(
+            features=features,
+            truth=truth,
+            node_truth=node_truth,
+            loss_weight=loss_weight,
+        )
+        return graph
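A minimal usage sketch (illustrative only; the path, pulsemap and column names are hypothetical, and `graph_definition` is assumed to be any configured `GraphDefinition` instance), showing the `multiprocessing_context="spawn"` setting recommended in the constructor docstring:

    from graphnet.data.dataset import ParquetDataset
    from graphnet.data.dataloader import DataLoader

    dataset = ParquetDataset(
        path="/data/converted_parquet",      # directory written by ParquetWriter
        graph_definition=graph_definition,   # assumed defined elsewhere
        pulsemaps="pulsemap_table",
        features=["dom_x", "dom_y", "dom_z", "dom_time"],
        truth=["energy", "zenith"],
        cache_size=2,                        # keep two batches in memory
    )

    # "spawn" avoids the thread-locking issue noted in the class docstring.
    dataloader = DataLoader(
        dataset,
        batch_size=16,
        num_workers=4,
        multiprocessing_context="spawn",
    )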

    @@ -486,7 +764,7 @@

    Source Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/dataset/sqlite/sqlite_dataset.html b/_modules/graphnet/data/dataset/sqlite/sqlite_dataset.html index ef174366f..7cbda8a17 100644 --- a/_modules/graphnet/data/dataset/sqlite/sqlite_dataset.html +++ b/_modules/graphnet/data/dataset/sqlite/sqlite_dataset.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -324,9 +351,10 @@

    Source code for graphnet.data.dataset.sqlite.sqlite_dataset

     """`Dataset` class(es) for reading data from SQLite databases."""
     
    -from typing import Any, List, Optional, Tuple, Union
    +from typing import Any, List, Optional, Tuple, Union, Dict
     import pandas as pd
     import sqlite3
    +import numpy as np
     
     from graphnet.data.dataset.dataset import Dataset, ColumnMissingException
     
    @@ -363,6 +391,9 @@ 

         self._conn: Optional[sqlite3.Connection] = None

     def _post_init(self) -> None:
+        # Purely internal member variables
+        self._missing_variables: Dict[str, List[str]] = {}
+        self._remove_missing_columns()
         self._close_connection()
    @@ -405,7 +436,7 @@

                 raise ColumnMissingException(str(e))
             else:
                 raise e
-        return result
+        return np.asarray(result)

     def _get_all_indices(self) -> List[int]:
@@ -416,18 +447,18 @@

         self._close_connection()
         return indices.values.ravel().tolist()

-    def _get_event_index(
-        self, sequential_index: Optional[int]
-    ) -> Optional[int]:
+    def _get_event_index(self, sequential_index: Optional[int]) -> int:
         index: int = 0
         if sequential_index is not None:
             index_ = self._indices[sequential_index]
             if self._database_list is None:
-                assert isinstance(index_, int)
-                index = index_
+                if not isinstance(index_, int):
+                    index_ = int(index_)  # type: ignore
             else:
-                assert isinstance(index_, list)
-                index = index_[0]
+                if not isinstance(index_, int):
+                    index_ = int(index_[0])
+            assert isinstance(index_, int)
+            index = index_
         return index

     # Custom, internal method(s)
@@ -501,7 +532,7 @@

     Created using
-    Sphinx 7.2.6.
+    Sphinx 7.3.7.
     and Material for Sphinx

diff --git a/_modules/graphnet/data/extractors/combine_extractors.html b/_modules/graphnet/data/extractors/combine_extractors.html
new file mode 100644
index 000000000..76854d3e1
--- /dev/null
+++ b/_modules/graphnet/data/extractors/combine_extractors.html
@@ -0,0 +1,428 @@
+graphnet.data.extractors.combine_extractors — graphnet documentation

    Source code for graphnet.data.extractors.combine_extractors

    +"""Module for combining multiple extractors into a single extractor."""
    +from typing import TYPE_CHECKING
    +
    +from graphnet.utilities.imports import has_icecube_package
    +from graphnet.data.extractors.icecube.i3extractor import I3Extractor
    +from typing import List, Dict
    +
    +if has_icecube_package() or TYPE_CHECKING:
    +    from icecube import icetray  # pyright: reportMissingImports=false
    +
    +
    +
    +[docs] +class CombinedExtractor(I3Extractor): + """Class for combining multiple extractors. + + This class is used to combine multiple extractors into a single extractor + with a new name. + """ + + def __init__(self, extractors: List[I3Extractor], extractor_name: str): + """Construct CombinedExtractor. + + Args: + extractors: List of extractors to combine. The extractors must all return data on the same level; e.g. all event-level data or pulse-level data. Mixing tables that contain event-level and pulse-level information will fail. + extractor_name: Name of the new extractor. + """ + super().__init__(extractor_name=extractor_name) + self._extractors = extractors + + def __call__(self, frame: "icetray.I3Frame") -> Dict[str, float]: + """Extract data from frame using all extractors. + + Args: + frame: I3Frame to extract data from. + """ + output = {} + for extractor in self._extractors: + output.update(extractor(frame)) + return output
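For illustration, a sketch (assuming `truth_extractor` and `reco_extractor` are existing event-level `I3Extractor` instances) of combining two extractors under one table name:

    from graphnet.data.extractors.combine_extractors import CombinedExtractor

    combined = CombinedExtractor(
        extractors=[truth_extractor, reco_extractor],  # same level: event-level
        extractor_name="combined_truth",
    )

    # Per frame, the outputs are merged into a single dictionary:
    # combined(frame) == {**truth_extractor(frame), **reco_extractor(frame)}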
diff --git a/_modules/graphnet/data/extractors/extractor.html b/_modules/graphnet/data/extractors/extractor.html
index 3d5037a63..22f43696f 100644
--- a/_modules/graphnet/data/extractors/extractor.html
+++ b/_modules/graphnet/data/extractors/extractor.html
@@ -323,8 +350,9 @@

    Source code for graphnet.data.extractors.extractor

     """Base I3Extractor class(es)."""
    -from typing import Any
    +from typing import Any, Union
     from abc import ABC, abstractmethod
    +import pandas as pd
     
     from graphnet.utilities.logging import Logger
     
    @@ -362,7 +390,7 @@ 

         super().__init__(name=__name__, class_name=self.__class__.__name__)

     @abstractmethod
-    def __call__(self, data: Any) -> dict:
+    def __call__(self, data: Any) -> Union[dict, pd.DataFrame]:
         """Extract information from data."""
         pass
@@ -395,7 +423,7 @@

    Source code for g

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3extractor.html b/_modules/graphnet/data/extractors/icecube/i3extractor.html index 4715a0a3c..92e23878c 100644 --- a/_modules/graphnet/data/extractors/icecube/i3extractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3extractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -444,7 +471,7 @@

    Source Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3featureextractor.html b/_modules/graphnet/data/extractors/icecube/i3featureextractor.html index df6ff594f..e527f6b29 100644 --- a/_modules/graphnet/data/extractors/icecube/i3featureextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3featureextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -663,7 +690,7 @@

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3genericextractor.html b/_modules/graphnet/data/extractors/icecube/i3genericextractor.html index 9c046b41a..284e1d308 100644 --- a/_modules/graphnet/data/extractors/icecube/i3genericextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3genericextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -643,7 +670,7 @@

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3hybridrecoextractor.html b/_modules/graphnet/data/extractors/icecube/i3hybridrecoextractor.html index d40fde3f4..d35e08408 100644 --- a/_modules/graphnet/data/extractors/icecube/i3hybridrecoextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3hybridrecoextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -400,7 +427,7 @@

    Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3ntmuonlabelsextractor.html b/_modules/graphnet/data/extractors/icecube/i3ntmuonlabelsextractor.html index 53a306b78..99c65d232 100644 --- a/_modules/graphnet/data/extractors/icecube/i3ntmuonlabelsextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3ntmuonlabelsextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -407,7 +434,7 @@

    Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3particleextractor.html b/_modules/graphnet/data/extractors/icecube/i3particleextractor.html index 59ba1c7c4..511fcb466 100644 --- a/_modules/graphnet/data/extractors/icecube/i3particleextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3particleextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -393,7 +420,7 @@

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3pisaextractor.html b/_modules/graphnet/data/extractors/icecube/i3pisaextractor.html index 9134c216c..01b12285e 100644 --- a/_modules/graphnet/data/extractors/icecube/i3pisaextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3pisaextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -385,7 +412,7 @@

    Sou Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3quesoextractor.html b/_modules/graphnet/data/extractors/icecube/i3quesoextractor.html index a0d16868c..5231089b1 100644 --- a/_modules/graphnet/data/extractors/icecube/i3quesoextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3quesoextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -395,7 +422,7 @@

    So Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3retroextractor.html b/_modules/graphnet/data/extractors/icecube/i3retroextractor.html index 5299392c8..ccfdfa786 100644 --- a/_modules/graphnet/data/extractors/icecube/i3retroextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3retroextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -467,7 +494,7 @@

    So Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3splinempeextractor.html b/_modules/graphnet/data/extractors/icecube/i3splinempeextractor.html index d2c33d7d9..d794fc6b0 100644 --- a/_modules/graphnet/data/extractors/icecube/i3splinempeextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3splinempeextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -379,7 +406,7 @@

    Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3truthextractor.html b/_modules/graphnet/data/extractors/icecube/i3truthextractor.html index 386bed579..ce42482c5 100644 --- a/_modules/graphnet/data/extractors/icecube/i3truthextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3truthextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -789,7 +816,7 @@

    So Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/i3tumextractor.html b/_modules/graphnet/data/extractors/icecube/i3tumextractor.html index 2bfb35fdd..ebafcaef6 100644 --- a/_modules/graphnet/data/extractors/icecube/i3tumextractor.html +++ b/_modules/graphnet/data/extractors/icecube/i3tumextractor.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -382,7 +409,7 @@

    Sour Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/utilities/collections.html b/_modules/graphnet/data/extractors/icecube/utilities/collections.html index 680b630b3..60354a156 100644 --- a/_modules/graphnet/data/extractors/icecube/utilities/collections.html +++ b/_modules/graphnet/data/extractors/icecube/utilities/collections.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -436,7 +463,7 @@

    Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/utilities/frames.html b/_modules/graphnet/data/extractors/icecube/utilities/frames.html index 559d122c9..1cfc52261 100644 --- a/_modules/graphnet/data/extractors/icecube/utilities/frames.html +++ b/_modules/graphnet/data/extractors/icecube/utilities/frames.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -439,7 +466,7 @@

    So Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/utilities/i3_filters.html b/_modules/graphnet/data/extractors/icecube/utilities/i3_filters.html index e02d4474e..7237fa78b 100644 --- a/_modules/graphnet/data/extractors/icecube/utilities/i3_filters.html +++ b/_modules/graphnet/data/extractors/icecube/utilities/i3_filters.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -483,7 +510,7 @@

    Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/extractors/icecube/utilities/types.html b/_modules/graphnet/data/extractors/icecube/utilities/types.html index 8cda573aa..347bdc75f 100644 --- a/_modules/graphnet/data/extractors/icecube/utilities/types.html +++ b/_modules/graphnet/data/extractors/icecube/utilities/types.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -660,7 +687,7 @@

     Created using
-    Sphinx 7.2.6.
+    Sphinx 7.3.7.
     and Material for Sphinx

diff --git a/_modules/graphnet/data/extractors/internal/parquet_extractor.html b/_modules/graphnet/data/extractors/internal/parquet_extractor.html
new file mode 100644
index 000000000..6554d80d0
--- /dev/null
+++ b/_modules/graphnet/data/extractors/internal/parquet_extractor.html
@@ -0,0 +1,424 @@
+graphnet.data.extractors.internal.parquet_extractor — graphnet documentation

    Source code for graphnet.data.extractors.internal.parquet_extractor

    +"""Parquet Extractor for conversion from internal parquet format."""
    +import polars as pol
+import pandas as pd
+from typing import Optional
    +
    +from graphnet.data.extractors import Extractor
    +
    +
    +
+[docs]
+class ParquetExtractor(Extractor):
+    """Class for extracting information from internal GraphNeT parquet files.
+
+    Contains functionality required to extract data from internal parquet
+    files, i.e. files saved using the ParquetWriter. This allows for
+    conversion between internal data formats.
+    """
+
+    def __init__(self, extractor_name: str):
+        """Construct ParquetExtractor.
+
+        Args:
+            extractor_name: Name of the `ParquetExtractor` instance.
+                Used to keep track of the provenance of different data,
+                and to name tables to which this data is saved.
+        """
+        # Member variable(s)
+        self._table = extractor_name
+        # Base class constructor
+        super().__init__(extractor_name=extractor_name)
+
+    def __call__(self, file_path: str) -> Optional[pd.DataFrame]:
+        """Extract information from parquet file."""
+        if self._table in file_path:
+            return pol.read_parquet(file_path).to_pandas()
+        else:
+            return None
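Since `__call__` only returns data when the extractor's table name occurs in the file path, one extractor is typically instantiated per table. A sketch with hypothetical paths:

    from graphnet.data.extractors.internal import ParquetExtractor

    truth_extractor = ParquetExtractor("truth")

    # Returns a pd.DataFrame for files belonging to the "truth" table ...
    df = truth_extractor("/data/parquet/truth/truth_3.parquet")
    # ... and None for files belonging to any other table:
    assert truth_extractor("/data/parquet/pulsemap/pulsemap_3.parquet") is None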
diff --git a/_modules/graphnet/data/extractors/liquido/h5_extractor.html b/_modules/graphnet/data/extractors/liquido/h5_extractor.html
new file mode 100644
index 000000000..d3e267f2f
--- /dev/null
+++ b/_modules/graphnet/data/extractors/liquido/h5_extractor.html
@@ -0,0 +1,488 @@
+graphnet.data.extractors.liquido.h5_extractor — graphnet documentation

    Source code for graphnet.data.extractors.liquido.h5_extractor

    +"""H5 Extractor for LiquidO data files."""
+from typing import List, Optional
    +import numpy as np
    +import pandas as pd
    +import h5py
    +
    +from graphnet.data.extractors import Extractor
    +
    +
    +
+[docs]
+class H5Extractor(Extractor):
+    """Class for extracting information from LiquidO h5 files."""
+
+    def __init__(self, extractor_name: str, column_names: List[str]):
+        """Construct H5Extractor.
+
+        Args:
+            extractor_name: Name of the `H5Extractor` instance.
+                Used to keep track of the provenance of different data,
+                and to name tables to which this data is saved.
+            column_names: Name of the columns in `extractor_name`.
+        """
+        # Member variable(s)
+        self._table = extractor_name
+        self._column_names = column_names
+        # Base class constructor
+        super().__init__(extractor_name=extractor_name)
+
+    def __call__(self, file_path: str) -> Optional[pd.DataFrame]:
+        """Extract information from h5 file."""
+        with h5py.File(file_path, "r") as f:
+            available_tables = list(f.keys())
+            if self._table in available_tables:
+                array = f[self._table][:]
+                # Will throw error if the number of columns don't match
+                self._verify_columns(array)
+                df = pd.DataFrame(array, columns=self._column_names)
+                return df
+            else:
+                return None
+
+    def _verify_columns(self, array: np.ndarray) -> None:
+        try:
+            assert array.shape[1] == len(self._column_names)
+        except AssertionError as e:
+            self.error(
+                f"Got {len(self._column_names)} column names but "
+                f"{self._table} has {array.shape[1]}. Please make sure "
+                f"that the column names match. ({self._column_names})"
+            )
+            raise e
    + + + +
    +[docs] +class H5HitExtractor(H5Extractor): + """Extractor for `HitData` in LiquidO H5 files.""" + + def __init__(self) -> None: + """Extractor for `HitData` in LiquidO H5 files.""" + # Base class constructor + super().__init__( + extractor_name="HitData", + column_names=[ + "event_no", + "sipmID", + "sipm_x", + "sipm_y", + "sipm_z", + "t", + "var", + ], + )
    + + + +
    +[docs] +class H5TruthExtractor(H5Extractor): + """Extractor for `TruthData` in LiquidO H5 files.""" + + def __init__(self) -> None: + """Extractor for `TruthData` in LiquidO H5 files.""" + # Base class constructor + super().__init__( + extractor_name="TruthData", + column_names=[ + "event_no", + "vertex_x", + "vertex_y", + "vertex_z", + "zenith", + "azimuth", + "interaction_time", + "energy", + "pid", + ], + )
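A sketch of constructing a custom LiquidO extractor (hypothetical file path; the table and column names must match the layout of the h5 file, here reusing the `HitData` columns from `H5HitExtractor` above):

    from graphnet.data.extractors.liquido import H5Extractor

    hit_extractor = H5Extractor(
        extractor_name="HitData",
        column_names=["event_no", "sipmID", "sipm_x", "sipm_y", "sipm_z", "t", "var"],
    )

    df = hit_extractor("/data/liquido/file_0.h5")  # pd.DataFrame, or None if table absent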
diff --git a/_modules/graphnet/data/extractors/prometheus/prometheus_extractor.html b/_modules/graphnet/data/extractors/prometheus/prometheus_extractor.html
new file mode 100644
index 000000000..c31911ff8
--- /dev/null
+++ b/_modules/graphnet/data/extractors/prometheus/prometheus_extractor.html
@@ -0,0 +1,488 @@
+graphnet.data.extractors.prometheus.prometheus_extractor — graphnet documentation

    Source code for graphnet.data.extractors.prometheus.prometheus_extractor

    +"""Parquet Extractor for conversion of simulation files from PROMETHEUS."""
+from typing import Any, Dict, List
    +import pandas as pd
    +import numpy as np
    +
    +from graphnet.data.extractors import Extractor
    +
    +
    +
+[docs]
+class PrometheusExtractor(Extractor):
+    """Class for extracting information from PROMETHEUS parquet files.
+
+    Contains functionality required to extract data from PROMETHEUS parquet
+    files.
+    """
+
+    def __init__(self, extractor_name: str, columns: List[str]):
+        """Construct PrometheusExtractor.
+
+        Args:
+            extractor_name: Name of the `PrometheusExtractor` instance.
+                Used to keep track of the provenance of different data,
+                and to name tables to which this data is saved.
+            columns: List of column names to extract from the table.
+        """
+        # Member variable(s)
+        self._table = extractor_name
+        self._columns = columns
+        # Base class constructor
+        super().__init__(extractor_name=extractor_name)
+
+    def __call__(self, event: pd.DataFrame) -> Dict[str, List[Any]]:
+        """Extract information from a single event in a parquet file."""
+        output: Dict[str, List[Any]] = {key: [] for key in self._columns}
+        for key in self._columns:
+            if key in event.keys():
+                data = event[key]
+                if isinstance(data, np.ndarray):
+                    data = data.tolist()
+                if isinstance(data, list):
+                    output[key].extend(data)
+                else:
+                    output[key].append(data)
+            else:
+                self.warning_once(f"{key} not found in {self._table}!")
+        return output
    + + + +
+[docs]
+class PrometheusTruthExtractor(PrometheusExtractor):
+    """Class for extracting event-level truth from Prometheus parquet files.
+
+    This Extractor will extract the "initial_state", i.e. the neutrino truth.
+    """
+
+    def __init__(self, table_name: str = "mc_truth") -> None:
+        """Construct PrometheusTruthExtractor.
+
+        Args:
+            table_name: Name of the table in the parquet files that contain
+                event-level truth. Defaults to "mc_truth".
+        """
+        columns = [
+            "interaction",
+            "initial_state_energy",
+            "initial_state_type",
+            "initial_state_zenith",
+            "initial_state_azimuth",
+            "initial_state_x",
+            "initial_state_y",
+            "initial_state_z",
+        ]
+        super().__init__(extractor_name=table_name, columns=columns)
    + + + +
    +[docs] +class PrometheusFeatureExtractor(PrometheusExtractor): + """Class for extracting pulses/photons from Prometheus parquet files.""" + + def __init__(self, table_name: str = "photons"): + """Construct PrometheusFeatureExtractor. + + Args: + table_name: Name of table in parquet files that contain the + photons/pulses. Defaults to "photons". + """ + columns = [ + "sensor_pos_x", + "sensor_pos_y", + "sensor_pos_z", + "string_id", + "sensor_id", + "t", + ] + super().__init__(extractor_name=table_name, columns=columns)
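A usage sketch (assuming `event` is a single event row read from a PROMETHEUS parquet file; columns missing from the event only trigger `warning_once`):

    from graphnet.data.extractors.prometheus import (
        PrometheusFeatureExtractor,
        PrometheusTruthExtractor,
    )

    truth_extractor = PrometheusTruthExtractor()      # reads "mc_truth"
    feature_extractor = PrometheusFeatureExtractor()  # reads "photons"

    truth = truth_extractor(event)      # dict: column -> list of values
    features = feature_extractor(event)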
diff --git a/_modules/graphnet/data/parquet/deprecated_methods.html b/_modules/graphnet/data/parquet/deprecated_methods.html
index 238ca2429..ca1425ee8 100644
--- a/_modules/graphnet/data/parquet/deprecated_methods.html
+++ b/_modules/graphnet/data/parquet/deprecated_methods.html
@@ -409,7 +436,7 @@

     Created using
-    Sphinx 7.2.6.
+    Sphinx 7.3.7.
     and Material for Sphinx

diff --git a/_modules/graphnet/data/pipeline.html b/_modules/graphnet/data/pipeline.html
deleted file mode 100644
index d969001ce..000000000
--- a/_modules/graphnet/data/pipeline.html
+++ /dev/null
@@ -1,595 +0,0 @@
-graphnet.data.pipeline — graphnet documentation

    Source code for graphnet.data.pipeline

    -"""Class(es) used for analysis in PISA."""
    -
    -from abc import ABC
    -import dill
    -from functools import reduce
    -import os
    -from typing import Dict, List, Optional, Tuple
    -
    -import numpy as np
    -import pandas as pd
    -from pytorch_lightning import Trainer
    -import sqlite3
    -import torch
    -from torch.utils.data import DataLoader
    -
    -from graphnet.data.utilities.sqlite_utilities import (
    -    create_table_and_save_to_sql,
    -)
    -from graphnet.training.utils import get_predictions, make_dataloader
    -from graphnet.models.graphs import GraphDefinition
    -
    -from graphnet.utilities.logging import Logger
    -
    -
    -
    -[docs] -class InSQLitePipeline(ABC, Logger): - """Create a SQLite database for PISA analysis. - - The database will contain truth and GNN predictions and, if available, - RETRO reconstructions. - """ - - def __init__( - self, - module_dict: Dict, - features: List[str], - truth: List[str], - device: torch.device, - retro_table_name: str = "retro", - outdir: Optional[str] = None, - batch_size: int = 100, - n_workers: int = 10, - pipeline_name: str = "pipeline", - ): - """Initialise the pipeline. - - Args: - module_dict: A dictionary with GNN modules from GraphNet. E.g. - {'energy': gnn_module_for_energy_regression} - features: List of input features for the GNN modules. - truth: List of truth for the GNN ModuleList. - device: The device used for computation. - retro_table_name: Name of the retro table for. - outdir: the directory in which the pipeline database will be - stored. - batch_size: Batch size for inference. - n_workers: Number of workers used in dataloading. - pipeline_name: Name of the pipeline. If such a pipeline already - exists, an error will be prompted to avoid overwriting. - """ - self._pipeline_name = pipeline_name - self._device = device - self.n_workers = n_workers - self._features = features - self._truth = truth - self._batch_size = batch_size - self._outdir = outdir - self._module_dict = module_dict - self._retro_table_name = retro_table_name - - # Base class constructor - super().__init__(name=__name__, class_name=self.__class__.__name__) - - def __call__( - self, - database: str, - pulsemap: str, - graph_definition: GraphDefinition, - chunk_size: int = 1000000, - ) -> None: - """Run inference of each field in self._module_dict[target]['']. - - Args: - database: Path to database with pulsemap and truth. - pulsemap: Name of pulsemaps. - graph_definition: GraphDefinition for Dataset - chunk_size: database will be sliced in chunks of size `chunk_size`. - Use this parameter to control memory usage. - """ - outdir = self._get_outdir(database) - if isinstance( - self._device, str - ): # Because pytorch lightning insists on breaking pytorch cuda device naming scheme - device = int(self._device[-1]) - if not os.path.isdir(outdir): - dataloaders, event_batches = self._setup_dataloaders( - graph_definition=graph_definition, - chunk_size=chunk_size, - db=database, - pulsemap=pulsemap, - selection=None, - persistent_workers=False, - ) - i = 0 - for dataloader in dataloaders: - self.info("CHUNK %s / %s" % (i, len(dataloaders))) - df = self._inference(device, dataloader) - truth = self._get_truth(database, event_batches[i].tolist()) - retro = self._get_retro(database, event_batches[i].tolist()) - self._append_to_pipeline(outdir, truth, retro, df) - i += 1 - else: - self.info(outdir) - self.info( - "WARNING - Pipeline named %s already exists! \n Please rename pipeline!" 
- % self._pipeline_name - ) - - def _setup_dataloaders( - self, - chunk_size: int, - db: str, - pulsemap: str, - graph_definition: GraphDefinition, - selection: Optional[List[int]] = None, - persistent_workers: bool = False, - ) -> Tuple[List[DataLoader], List[np.ndarray]]: - if selection is None: - selection = self._get_all_event_nos(db) - n_chunks = np.ceil(len(selection) / chunk_size) - event_batches = np.array_split(selection, n_chunks) - dataloaders = [] - for batch in event_batches: - dataloaders.append( - make_dataloader( - db=db, - graph_definition=graph_definition, - pulsemaps=pulsemap, - features=self._features, - truth=self._truth, - batch_size=self._batch_size, - shuffle=False, - selection=batch.tolist(), - num_workers=self.n_workers, - persistent_workers=persistent_workers, - ) - ) - return dataloaders, event_batches - - def _get_all_event_nos(self, db: str) -> List[int]: - with sqlite3.connect(db) as con: - query = "SELECT event_no FROM truth" - selection = pd.read_sql(query, con).values.ravel().tolist() - return selection - - def _combine_outputs(self, dataframes: List[pd.DataFrame]) -> pd.DataFrame: - return reduce(lambda x, y: pd.merge(x, y, on="event_no"), dataframes) - - def _inference( - self, device: torch.device, dataloader: DataLoader - ) -> pd.DataFrame: - dataframes = [] - for target in self._module_dict.keys(): - # dataloader = iter(dataloader) - trainer = Trainer(devices=[device], accelerator="gpu") - model = torch.load( - self._module_dict[target]["path"], - map_location="cpu", - pickle_module=dill, - ) - model.eval() - model.inference() - results = get_predictions( - trainer, - model, - dataloader, - self._module_dict[target]["output_column_names"], - additional_attributes=["event_no"], - ) - dataframes.append( - results.sort_values("event_no").reset_index(drop=True) - ) - df = self._combine_outputs(dataframes) - return df - - def _get_outdir(self, database: str) -> str: - if self._outdir is None: - database_name = database.split("/")[-3] - outdir = ( - database.split(database_name)[0] - + database_name - + "/pipelines/" - + self._pipeline_name - ) - else: - outdir = self._outdir - return outdir - - def _get_truth(self, database: str, selection: List[int]) -> pd.DataFrame: - with sqlite3.connect(database) as con: - query = "SELECT * FROM truth WHERE event_no in %s" % str( - tuple(selection) - ) - truth = pd.read_sql(query, con) - return truth - - def _get_retro(self, database: str, selection: List[int]) -> pd.DataFrame: - try: - with sqlite3.connect(database) as con: - query = "SELECT * FROM %s WHERE event_no in %s" % ( - self._retro_table_name, - str(tuple(selection)), - ) - retro = pd.read_sql(query, con) - return retro - except: # noqa: E722 - self.info("%s table does not exist" % self._retro_table_name) - - def _append_to_pipeline( - self, - outdir: str, - truth: pd.DataFrame, - retro: pd.DataFrame, - df: pd.DataFrame, - ) -> None: - os.makedirs(outdir, exist_ok=True) - pipeline_database = outdir + "/%s.db" % self._pipeline_name - create_table_and_save_to_sql(df, "reconstruction", pipeline_database) - create_table_and_save_to_sql(truth, "truth", pipeline_database) - if isinstance(retro, pd.DataFrame): - create_table_and_save_to_sql( - retro, self._retro_table_name, pipeline_database - )
diff --git a/_modules/graphnet/data/pre_configured/dataconverters.html b/_modules/graphnet/data/pre_configured/dataconverters.html
index 23e0e57ae..8eb56946a 100644
--- a/_modules/graphnet/data/pre_configured/dataconverters.html
+++ b/_modules/graphnet/data/pre_configured/dataconverters.html
@@ -327,9 +354,10 @@

    Source c from typing import List, Union, Type from graphnet.data import DataConverter -from graphnet.data.readers import I3Reader +from graphnet.data.readers import I3Reader, ParquetReader from graphnet.data.writers import ParquetWriter, SQLiteWriter from graphnet.data.extractors.icecube import I3Extractor +from graphnet.data.extractors.internal import ParquetExtractor from graphnet.data.extractors.icecube.utilities.i3_filters import I3Filter @@ -427,6 +455,45 @@

    Source c outdir=outdir, ) + + +
+[docs]
+class ParquetToSQLiteConverter(DataConverter):
+    """Preconfigured DataConverter for converting Parquet to SQLite files.
+
+    This class converts Parquet files written by ParquetWriter to SQLite.
+    """
+
+    def __init__(
+        self,
+        extractors: List[ParquetExtractor],
+        outdir: str,
+        index_column: str = "event_no",
+        num_workers: int = 1,
+    ):
+        """Convert internal Parquet files to SQLite.
+
+        Args:
+            extractors: The `Extractor`(s) that will be applied to the input
+                files.
+            outdir: The directory to save the files in.
+            index_column: Name of the event id column added to the events.
+                Defaults to "event_no".
+            num_workers: The number of CPUs used for parallel processing.
+                Defaults to 1 (no multiprocessing).
+        """
+        super().__init__(
+            file_reader=ParquetReader(),
+            save_method=SQLiteWriter(),
+            extractors=extractors,
+            num_workers=num_workers,
+            index_column=index_column,
+            outdir=outdir,
+        )
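An end-to-end conversion sketch (hypothetical directories and table names; assumes the converter is called on a directory previously written by `ParquetWriter`):

    from graphnet.data.pre_configured.dataconverters import ParquetToSQLiteConverter
    from graphnet.data.extractors.internal import ParquetExtractor

    converter = ParquetToSQLiteConverter(
        extractors=[ParquetExtractor("truth"), ParquetExtractor("pulsemap")],
        outdir="/data/converted_sqlite",
        num_workers=4,
    )
    converter("/data/converted_parquet")  # one extractor per parquet table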
    + @@ -451,7 +518,7 @@

    Source c Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/readers/graphnet_file_reader.html b/_modules/graphnet/data/readers/graphnet_file_reader.html index b14cda367..f7706ba94 100644 --- a/_modules/graphnet/data/readers/graphnet_file_reader.html +++ b/_modules/graphnet/data/readers/graphnet_file_reader.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -328,16 +355,20 @@

    Source co file formats. """ -from typing import List, Union, OrderedDict, Any +from typing import List, Union, OrderedDict, Any, Dict from abc import abstractmethod, ABC import glob import os +import pandas as pd from graphnet.utilities.decorators import final from graphnet.utilities.logging import Logger from graphnet.data.dataclasses import I3FileSet from graphnet.data.extractors.extractor import Extractor from graphnet.data.extractors.icecube import I3Extractor +from graphnet.data.extractors.internal import ParquetExtractor +from graphnet.data.extractors.liquido import H5Extractor +from graphnet.data.extractors.prometheus import PrometheusExtractor
    @@ -357,13 +388,21 @@

     _accepted_extractors: List[Any] = []

     @abstractmethod
-    def __call__(self, file_path: Union[str, I3FileSet]) -> List[OrderedDict]:
+    def __call__(
+        self, file_path: Any
+    ) -> Union[List[OrderedDict[str, pd.DataFrame]], Dict[str, pd.DataFrame]]:
         """Open and apply extractors to a single file.

-        The `output` must be a list of dictionaries, where the number of events
-        in the file `n_events` satisfies `len(output) = n_events`. I.e each
-        element in the list is a dictionary, and each field in the dictionary
-        is the output of a single extractor.
+        The `output` must be either
+        A) a list of dictionaries, where the number of events in the file
+           `n_events` satisfies `len(output) = n_events`. I.e. each element
+           in the list is a dictionary, and each field in the dictionary is
+           the output of a single extractor. If this is provided, the
+           `DataConverter` will automatically assign event ids.
+        B) a single dictionary where each field contains a single dataframe,
+           which holds the data from the `Extractor` for the entire file. In
+           this case, the `Reader` must itself assign event ids. This method
+           is faster if your files are not storing events serially.
         """

     @property
@@ -412,7 +451,14 @@

    Source co [docs] @final def set_extractors( - self, extractors: Union[List[Extractor], List[I3Extractor]] + self, + extractors: Union[ + List[Extractor], + List[I3Extractor], + List[ParquetExtractor], + List[H5Extractor], + List[PrometheusExtractor], + ], ) -> None: """Set `Extractor`(s) as member variable. @@ -427,7 +473,14 @@

    Source co @final def _validate_extractors( - self, extractors: Union[List[Extractor], List[I3Extractor]] + self, + extractors: Union[ + List[Extractor], + List[I3Extractor], + List[ParquetExtractor], + List[H5Extractor], + List[PrometheusExtractor], + ], ) -> None: for extractor in extractors: try: @@ -500,7 +553,7 @@
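A minimal sketch of a custom reader following contract B above (all names hypothetical; compare the concrete `ParquetReader`, `LiquidOReader` and `PrometheusReader` further below):

    from glob import glob
    import os
    from typing import Dict, List, Union

    import pandas as pd

    from graphnet.data.readers.graphnet_file_reader import GraphNeTFileReader

    class MyFileReader(GraphNeTFileReader):
        """Toy reader returning one DataFrame per extractor (contract B)."""

        _accepted_file_extensions = [".csv"]
        _accepted_extractors: List = []  # would list compatible Extractor types

        def __call__(self, file_path: str) -> Dict[str, pd.DataFrame]:
            # Each field holds the whole-file output of one extractor;
            # under contract B the reader must assign event ids itself.
            outputs = {}
            for extractor in self._extractors:
                output = extractor(file_path)
                if output is not None:
                    outputs[extractor._extractor_name] = output
            return outputs

        def find_files(self, path: Union[str, List[str]]) -> List[str]:
            if isinstance(path, str):
                path = [path]
            files: List[str] = []
            for p in path:
                files.extend(glob(os.path.join(p, "*.csv")))
            return files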

    Source co

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/readers/i3reader.html b/_modules/graphnet/data/readers/i3reader.html index c0135b94a..b370044d7 100644 --- a/_modules/graphnet/data/readers/i3reader.html +++ b/_modules/graphnet/data/readers/i3reader.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -372,6 +399,8 @@

             icetray_verbose: Set the level of verbosity of icetray.
                 Defaults to 0.
         """
+        # checks
+        assert isinstance(gcd_rescue, str)
         # Set verbosity
         if icetray_verbose == 0:
             icetray.I3Logger.global_logger = icetray.I3NullLogger()
@@ -442,12 +471,16 @@

                 path,
                 self._gcd_rescue,
             )
+        # checks
+        assert len(i3_files) == len(gcd_files)

         # Pack as I3FileSets
-        filesets = [
-            I3FileSet(i3_file, gcd_file)
-            for i3_file, gcd_file in zip(i3_files, gcd_files)
-        ]
+        filesets = []
+        for i3_file, gcd_file in zip(i3_files, gcd_files):
+            assert isinstance(i3_file, str)
+            assert isinstance(
+                gcd_file, str
+            ), f"{gcd_file} / {self._gcd_rescue}"
+            filesets.append(I3FileSet(i3_file, gcd_file))
+
         return filesets
@@ -489,7 +522,7 @@

     Created using
-    Sphinx 7.2.6.
+    Sphinx 7.3.7.
     and Material for Sphinx

diff --git a/_modules/graphnet/data/readers/internal_parquet_reader.html b/_modules/graphnet/data/readers/internal_parquet_reader.html
new file mode 100644
index 000000000..a595fc887
--- /dev/null
+++ b/_modules/graphnet/data/readers/internal_parquet_reader.html
@@ -0,0 +1,448 @@
+graphnet.data.readers.internal_parquet_reader — graphnet documentation

    Source code for graphnet.data.readers.internal_parquet_reader

    +"""Module containing different reader for GraphNeT internal parquet format."""
    +
    +from typing import List, Union, Dict
    +from glob import glob
    +import os
    +import pandas as pd
    +
    +from graphnet.data.extractors.internal import ParquetExtractor
    +from .graphnet_file_reader import GraphNeTFileReader
    +
    +
    +
    +[docs] +class ParquetReader(GraphNeTFileReader): + """A class for reading the internal GraphNeT parquet format.""" + + _accepted_file_extensions = [".parquet"] + _accepted_extractors = [ParquetExtractor] + + def __call__(self, file_path: str) -> Dict[str, pd.DataFrame]: + """Extract data from single parquet file. + + Args: + file_path: Path to parquet file. + + Returns: + Extracted data. + """ + # Open file + outputs = {} + for extractor in self._extractors: + output = extractor(file_path) + if output is not None: + outputs[extractor._extractor_name] = output + return outputs + +
+[docs] + def find_files(self, path: Union[str, List[str]]) -> List[str]: + """Search parquet folders for files. + + Args: + path: directory containing the parquet folders. + + Returns: + List of parquet files in the folders. + """ + # Find all parquet files under the per-extractor subdirectories. + files = [] + if isinstance(path, str): + path = [path] + for p in path: + for extractor in self._extractors: + files.extend( + glob( + os.path.join(p, extractor._extractor_name, "*.parquet") + ) + ) + return files
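Taken together, `find_files` and `__call__` mean the reader expects one subdirectory per extractor table inside the input directory. A minimal sketch under that assumption (paths and table name hypothetical):

    reader = ParquetReader()
    reader.set_extractors([ParquetExtractor("truth")])
    files = reader.find_files("/data/internal_parquet")  # globs /data/internal_parquet/truth/*.parquet
    tables = reader(files[0])  # -> {"truth": pd.DataFrame}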
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/data/readers/liquido_reader.html b/_modules/graphnet/data/readers/liquido_reader.html new file mode 100644 index 000000000..7286950ab --- /dev/null +++ b/_modules/graphnet/data/readers/liquido_reader.html @@ -0,0 +1,442 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.data.readers.liquido_reader — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.data.readers.liquido_reader

    +"""Modules for reading data files from LiquidO."""
    +
    +from typing import List, Union, Dict
    +from glob import glob
    +import os
    +import pandas as pd
    +
    +from graphnet.data.extractors.liquido import H5Extractor
    +from .graphnet_file_reader import GraphNeTFileReader
    +
    +
    +
+[docs] +class LiquidOReader(GraphNeTFileReader): + """A class for reading h5 files from LiquidO.""" + + _accepted_file_extensions = [".h5"] + _accepted_extractors = [H5Extractor] + + def __call__(self, file_path: str) -> Dict[str, pd.DataFrame]: + """Extract data from a single h5 file. + + Args: + file_path: Path to h5 file. + + Returns: + Extracted data. + """ + # Apply each extractor to the file + outputs = {} + for extractor in self._extractors: + output = extractor(file_path) + if output is not None: + outputs[extractor._extractor_name] = output + return outputs + 
    +[docs] + def find_files(self, path: Union[str, List[str]]) -> List[str]: + """Search folder(s) for h5 files. + + Args: + path: directory to search for h5 files. + + Returns: + List of h5 files in the folders. + """ + files = [] + if isinstance(path, str): + path = [path] + for p in path: + files.extend(glob(os.path.join(p, "*.h5"))) + return files
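A minimal sketch of reading a single LiquidO file with the classes above; the file path is hypothetical and the `H5Extractor` arguments are assumptions about its constructor:

    from graphnet.data.extractors.liquido import H5Extractor
    from graphnet.data.readers import LiquidOReader

    reader = LiquidOReader()
    reader.set_extractors([H5Extractor("hits", ["sipm_x", "sipm_y", "sipm_z", "t"])])  # assumed signature
    data = reader("/data/liquido/run_0.h5")  # -> Dict[str, pd.DataFrame]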
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/data/readers/prometheus_reader.html b/_modules/graphnet/data/readers/prometheus_reader.html new file mode 100644 index 000000000..728856905 --- /dev/null +++ b/_modules/graphnet/data/readers/prometheus_reader.html @@ -0,0 +1,456 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.data.readers.prometheus_reader — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.data.readers.prometheus_reader

    +"""Modules for reading data files from the Prometheus project."""
    +
    +from typing import List, Union, OrderedDict
    +import pandas as pd
    +from pathlib import Path
    +
    +from graphnet.data.extractors.prometheus import PrometheusExtractor
    +from .graphnet_file_reader import GraphNeTFileReader
    +
    +
    +
+[docs] +class PrometheusReader(GraphNeTFileReader): + """A class for reading parquet files from Prometheus simulation.""" + + _accepted_file_extensions = [".parquet"] + _accepted_extractors = [PrometheusExtractor] + + def __call__(self, file_path: str) -> List[OrderedDict]: + """Extract data from a single parquet file. + + Args: + file_path: Path to parquet file. + + Returns: + Extracted data. + """ + # Open file + outputs = [] + file = pd.read_parquet(file_path) + for k in range(len(file)): # Loop over events in file + extracted_event = OrderedDict() + for extractor in self._extractors: + assert isinstance(extractor, PrometheusExtractor) + if extractor._table in file.columns: + output = extractor(file[extractor._table][k]) + extracted_event[extractor._extractor_name] = output + outputs.append(extracted_event) + return outputs + 
+[docs] + def find_files(self, path: Union[str, List[str]]) -> List[str]: + """Search folder(s) for parquet files. + + Args: + path: directory to search for parquet files. + + Returns: + List of parquet files in the folders. + """ + files = [] + if isinstance(path, str): + path = [path] + + # List of files as Path objects. Glob once per accepted extension; + # globbing on the list itself would match nothing. + for p in path: + for extension in self._accepted_file_extensions: + files.extend(list(Path(p).rglob(f"*{extension}"))) + + # List of files as str's + paths_as_str: List[str] = [] + for f in files: + paths_as_str.append(f.absolute().as_posix()) + + return paths_as_str
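Because Prometheus parquet files store one whole event per row, the reader loops over rows and applies each extractor per event, so it returns a list of per-event dictionaries rather than one table per extractor. A minimal sketch (path hypothetical; the `PrometheusExtractor` arguments are assumptions about its constructor):

    from graphnet.data.extractors.prometheus import PrometheusExtractor
    from graphnet.data.readers import PrometheusReader

    reader = PrometheusReader()
    reader.set_extractors([PrometheusExtractor("photons", ["t"])])  # assumed signature
    events = reader("/data/prometheus/file_0.parquet")  # -> List[OrderedDict]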
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/data/sqlite/deprecated_methods.html b/_modules/graphnet/data/sqlite/deprecated_methods.html index d97e492fc..d431d8126 100644 --- a/_modules/graphnet/data/sqlite/deprecated_methods.html +++ b/_modules/graphnet/data/sqlite/deprecated_methods.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -411,7 +438,7 @@

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/utilities/parquet_to_sqlite.html b/_modules/graphnet/data/utilities/parquet_to_sqlite.html deleted file mode 100644 index 63a97a6eb..000000000 --- a/_modules/graphnet/data/utilities/parquet_to_sqlite.html +++ /dev/null @@ -1,549 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - graphnet.data.utilities.parquet_to_sqlite — graphnet documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Skip to content -

    Source code for graphnet.data.utilities.parquet_to_sqlite

    -"""Utilities for converting files from Parquet to SQLite."""
    -
    -import glob
    -import os
    -from typing import List, Optional, Union
    -
    -import awkward as ak
    -import numpy as np
    -import pandas as pd
    -from tqdm.auto import trange
    -
    -from graphnet.data.utilities.sqlite_utilities import (
    -    create_table_and_save_to_sql,
    -)
    -from graphnet.utilities.logging import Logger
    -
    -
    -
    -[docs] -class ParquetToSQLiteConverter(Logger): - """Convert Parquet files to a SQLite database. - - Each event in the parquet file(s) are assigned a unique event id. By - default, every field in the parquet file(s) are extracted. One can choose - to exclude certain fields by using the argument exclude_fields. - """ - - def __init__( - self, - parquet_path: Union[str, List[str]], - mc_truth_table: str = "mc_truth", - excluded_fields: Optional[Union[str, List[str]]] = None, - ): - """Construct `ParquetToSQLiteConverter`.""" - # checks - if isinstance(parquet_path, str): - pass - elif isinstance(parquet_path, list): - assert isinstance( - parquet_path[0], str - ), "Argument `parquet_path` must be a string or list of strings" - else: - assert isinstance( - parquet_path, str - ), "Argument `parquet_path` must be a string or list of strings" - - assert isinstance( - mc_truth_table, str - ), "Argument `mc_truth_table` must be a string" - self._parquet_files = self._find_parquet_files(parquet_path) - if excluded_fields is not None: - self._excluded_fields = excluded_fields - else: - self._excluded_fields = [] - self._mc_truth_table = mc_truth_table - self._event_counter = 0 - - # Base class constructor - super().__init__(name=__name__, class_name=self.__class__.__name__) - - def _find_parquet_files(self, paths: Union[str, List[str]]) -> List[str]: - if isinstance(paths, str): - if paths.endswith(".parquet"): - files = [paths] - else: - files = glob.glob(f"{paths}/*.parquet") - elif isinstance(paths, list): - files = [] - for path in paths: - files.extend(self._find_parquet_files(path)) - assert len(files) > 0, f"No files found in {paths}" - return files - -
    -[docs] - def run(self, outdir: str, database_name: str) -> None: - """Run Parquet to SQLite conversion. - - Args: - outdir: Output directory for SQLite database. - database_name: Name of output SQLite database. - """ - self._create_output_directories(outdir, database_name) - database_path = os.path.join( - outdir, database_name, "data", database_name + ".db" - ) - self.info(f"Processing {len(self._parquet_files)} Parquet file(s)") - for i in trange( - len(self._parquet_files), - unit="file(s)", - colour="green", - position=0, - ): - parquet_file = ak.from_parquet(self._parquet_files[i]) - n_events_in_file = self._count_events(parquet_file) - for j in trange( - len(parquet_file.fields), - desc="%s" % (self._parquet_files[i].split("/")[-1]), - colour="#ffa500", - position=1, - leave=False, - ): - if parquet_file.fields[j] not in self._excluded_fields: - self._save_to_sql( - database_path, - parquet_file, - parquet_file.fields[j], - n_events_in_file, - ) - self._event_counter += n_events_in_file - self._save_config(outdir, database_name) - self.info( - "Database saved at: \n" - f"{outdir}/{database_name}/data/{database_name}.db" - )
    - - - def _count_events(self, open_parquet_file: ak.Array) -> int: - return len(open_parquet_file[self._mc_truth_table]) - - def _save_to_sql( - self, - database_path: str, - ak_array: ak.Array, - field_name: str, - n_events_in_file: int, - ) -> None: - df = self._convert_to_dataframe(ak_array, field_name, n_events_in_file) - - if len(df) > n_events_in_file: - is_pulse_map = True - else: - is_pulse_map = False - - create_table_and_save_to_sql( - df, - field_name, - database_path, - integer_primary_key=not is_pulse_map, - ) - - def _convert_to_dataframe( - self, - ak_array: ak.Array, - field_name: str, - n_events_in_file: int, - ) -> pd.DataFrame: - df = pd.DataFrame(ak.to_pandas(ak_array[field_name])) - if len(df.columns) == 1: - if df.columns == ["values"]: - df.columns = [field_name] - - if "event_no" in df.columns: - return df - - # If true, the dataframe contains more than 1 row pr. event (i.e., - # pulsemap). - if len(df) != n_events_in_file: - event_nos = [] - c = 0 - for event_no in range( - self._event_counter, self._event_counter + n_events_in_file, 1 - ): - try: - event_nos.extend( - np.repeat(event_no, len(df[df.columns[0]][c])).tolist() - ) - - # KeyError indicates that this df has no entry for event_no - # (e.g., an event with no detector response). - except KeyError: - pass - c += 1 - else: - event_nos = np.arange(0, n_events_in_file, 1) + self._event_counter - df["event_no"] = event_nos - return df - - def _create_output_directories( - self, outdir: str, database_name: str - ) -> None: - os.makedirs(outdir + "/" + database_name + "/data", exist_ok=True) - os.makedirs(outdir + "/" + database_name + "/config", exist_ok=True) - - def _save_config(self, outdir: str, database_name: str) -> None: - """Save the list of converted Parquet files to a CSV file.""" - df = pd.DataFrame(data=self._parquet_files, columns=["files"]) - df.to_csv(outdir + "/" + database_name + "/config/files.csv")
    - - - - \ No newline at end of file diff --git a/_modules/graphnet/data/utilities/random.html b/_modules/graphnet/data/utilities/random.html index 3ccca08ca..800f43590 100644 --- a/_modules/graphnet/data/utilities/random.html +++ b/_modules/graphnet/data/utilities/random.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -375,7 +402,7 @@

    Source code for graph Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/utilities/sqlite_utilities.html b/_modules/graphnet/data/utilities/sqlite_utilities.html index a3137f96f..d597c655a 100644 --- a/_modules/graphnet/data/utilities/sqlite_utilities.html +++ b/_modules/graphnet/data/utilities/sqlite_utilities.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -397,9 +424,13 @@

primary_key_candidates.append(val) # There should be at most one primary key: - assert len(primary_key_candidates) == 1 + if len(primary_key_candidates) > 0: + assert len(primary_key_candidates) == 1 + primary_key_name = primary_key_candidates[0] + else: + primary_key_name = None - return integer_primary_key, primary_key_candidates[0] + return integer_primary_key, primary_key_name @@ -575,7 +606,7 @@
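A sketch of how a caller is expected to handle the new `None` case; the rescue column name mirrors the `primary_key_rescue` argument introduced in the SQLite writer below:

    tables, primary_key = get_primary_keys(database="/tmp/out/file_0.db")  # hypothetical path
    if primary_key is None:
        primary_key = "event_no"  # fall back to a known index column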

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/utilities/string_selection_resolver.html b/_modules/graphnet/data/utilities/string_selection_resolver.html index 8b7938f75..4d6d2a40c 100644 --- a/_modules/graphnet/data/utilities/string_selection_resolver.html +++ b/_modules/graphnet/data/utilities/string_selection_resolver.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -676,7 +703,7 @@

    So Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/writers/graphnet_writer.html b/_modules/graphnet/data/writers/graphnet_writer.html index 872aa88c1..81aeb36eb 100644 --- a/_modules/graphnet/data/writers/graphnet_writer.html +++ b/_modules/graphnet/data/writers/graphnet_writer.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -446,7 +473,7 @@

    Source code fo Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/writers/parquet_writer.html b/_modules/graphnet/data/writers/parquet_writer.html index f854d3e96..f2663fadb 100644 --- a/_modules/graphnet/data/writers/parquet_writer.html +++ b/_modules/graphnet/data/writers/parquet_writer.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -325,10 +352,14 @@

    Source code for """DataConverter for the Parquet backend.""" import os -from typing import List, Optional, Dict +from typing import List, Optional, Dict, Any -import awkward import pandas as pd +import polars as pol +from glob import glob +from tqdm import tqdm +import numpy as np +from multiprocessing import Pool from .graphnet_writer import GraphNeTWriter @@ -338,14 +369,30 @@

class ParquetWriter(GraphNeTWriter): """Class for writing interim data format to Parquet.""" - # Class variables - _file_extension = ".parquet" - _merge_dataframes = False + def __init__( + self, + truth_table: str = "truth", + index_column: str = "event_no", + ) -> None: + """Construct `ParquetWriter`. + + Args: + truth_table: Name of the table containing event-level truth data. + Defaults to "truth". + index_column: The column used for indexing. + Defaults to "event_no". + """ + super().__init__(name=__name__, class_name=self.__class__.__name__) + # Class variables + self._file_extension = ".parquet" + self._merge_dataframes = True + self._index_column = index_column + self._truth_table = truth_table # Abstract method implementation(s) def _save_file( self, - data: Dict[str, List[pd.DataFrame]], + data: Dict[str, pd.DataFrame], output_file_path: str, n_events: int, ) -> None: @@ -353,31 +400,189 @@

    Source code for # Check(s) if n_events > 0: - events = [] - for k in range(n_events): - event = {} - for table in data.keys(): - event[table] = data[table][k].to_dict(orient="list") - - events.append(event) - - awkward.to_parquet(awkward.from_iter(events), output_file_path) + for table in data.keys(): + save_path = os.path.dirname(output_file_path) + file_name = os.path.splitext( + os.path.basename(output_file_path) + )[0] + + table_dir = os.path.join(save_path, f"{table}") + os.makedirs(table_dir, exist_ok=True) + df = data[table].set_index(self._index_column) + df.to_parquet( + os.path.join(table_dir, file_name + f"_{table}.parquet") + )
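Under the `_save_file` logic above, each table is written to its own subdirectory next to the requested output file. For a hypothetical file stem `file_0` and tables `truth` and `photons`, the resulting layout would be:

    <output_dir>/
        truth/file_0_truth.parquet
        photons/file_0_photons.parquet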
    [docs] - def merge_files(self, files: List[str], output_dir: str) -> None: - """Merge parquet files. + def merge_files( + self, + files: List[str], + output_dir: str, + events_per_batch: int = 200000, + num_workers: int = 1, + ) -> None: + """Convert files into shuffled batches. - Args: - files: input files for merging. - output_dir: directory to store merged file(s) in. + Events will be shuffled, and the resulting batches will constitute + random subsamples of the full dataset. - Raises: - NotImplementedError + Args: + files: Files converted to parquet. Note this argument is ignored + by this method, as these files are automatically found + using the `output_dir`. + output_dir: The directory to store the batched data. + events_per_batch: Number of events in each batch. + Defaults to 200000. + num_workers: Number of workers to use for merging. Defaults to 1. """ - self.error(f"{self.__class__.__name__} does not have a merge method.") - raise NotImplementedError
- + # Handle inputs + input_dir = output_dir.replace("merged", "") + truth_dir = os.path.join(input_dir, self._truth_table) + tables = os.listdir(input_dir) + self._validate_inputs( + tables=tables, input_dir=input_dir, truth_dir=truth_dir + ) + + truth_files = glob(os.path.join(truth_dir, "*.parquet")) + + # Exit if no files found + if len(truth_files) == 0: + self.warning(f"No files found in {truth_dir}. Exiting.") + return + + # Produce a shuffled master-list of event_no's + truth_meta = self._identify_events( + index_column=self._index_column, + truth_table=self._truth_table, + truth_files=truth_files, + ) + + # Split event_nos into smaller batches "shards" + shards = self._split_dataframe( + df=truth_meta, chunk_size=events_per_batch + ) + + # Construct list of arguments for processing function + arguments = [] + for i in range(len(shards)): + arguments.append( + [ + tables, + shards[i], + input_dir, + i, + self._index_column, + output_dir, + ] + ) + + # Setup map function + if num_workers > 1: + self.info( + f"Processing {len(arguments)} batches using " + f"{num_workers} cores." + ) + pool = Pool(num_workers) + map_func = pool.imap + else: + self.info(f"Processing {len(arguments)} batches in main thread.") + map_func = map # type: ignore + + # Process files + for _ in map_func( + self._process_shard, + tqdm(arguments, unit="shard(s)", colour="green"), + ): + pass + + + def _identify_events( + self, index_column: str, truth_files: List[str], truth_table: str + ) -> pd.DataFrame: + res = pol.DataFrame() + for truth_file in truth_files: + df = pol.read_parquet(truth_file) + df2 = pol.concat( + [ + df.select([index_column]), + pol.DataFrame( + { + "file_name": np.repeat( + truth_file.replace( + f"_{truth_table}.parquet", "" + ).split("/")[-1], + len(df), + ) + } + ).select(["file_name"]), + ], + how="horizontal", + ) + res = pol.concat([res, df2]) + return res.to_pandas().sample(frac=1.0) + + def _split_dataframe( + self, df: pd.DataFrame, chunk_size: int + ) -> List[pd.DataFrame]: + # Ceiling division avoids producing a trailing empty chunk when + # `len(df)` is an exact multiple of `chunk_size`. + chunks = list() + num_chunks = int(np.ceil(len(df) / chunk_size)) + for i in range(num_chunks): + chunks.append(df[i * chunk_size : (i + 1) * chunk_size]) + return chunks + + def _process_shard(self, settings: List[Any]) -> None: + tables, splits, input_dir, batch_ids, index_column, outdir = settings + if outdir is None: + outdir = os.path.join(input_dir, "merged") + if not isinstance(splits, list): + splits = [splits] + + if not isinstance(batch_ids, list): + batch_ids = [batch_ids] + + for batch_id, split in zip(batch_ids, splits): + unique_files = pd.unique(split["file_name"]) + for table in tables: + table_shards = [] + for unique_file in unique_files: + path = ( + os.path.join(input_dir, table, unique_file) + + f"_{table}.parquet" + ) + df = pd.read_parquet(path) + + ids = split[index_column][split["file_name"] == unique_file] + + # Filter out indices that point to empty events + idx = [i for i in ids if i in df.index] + table_shards.append(df.loc[idx, :]) + + os.makedirs(os.path.join(outdir, table), exist_ok=True) + if len(table_shards) > 0: + combined_df = pd.concat(table_shards, axis=0) + combined_df.to_parquet( + os.path.join( + outdir, table, f"{table}_{batch_id}.parquet" + ) + ) + + def _validate_inputs( + self, tables: List[str], input_dir: str, truth_dir: str + ) -> None: + try: + assert "merged" not in tables + except AssertionError as e: + self.error( + f"Directory appears to already contain merged files" + f" under {os.path.join(input_dir, 'merged')}" + ) + raise e + try: + assert 
os.path.isdir(truth_dir) + except AssertionError as e: + self.error(f"Directory for truth {truth_dir} does not exist.") + raise e @@ -403,7 +608,7 @@
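A minimal end-to-end sketch of the batching step above (directories hypothetical; the `merged` directory must not already exist inside the input directory):

    writer = ParquetWriter(truth_table="truth", index_column="event_no")
    writer.merge_files(
        files=[],  # ignored; input files are discovered from `output_dir`
        output_dir="/data/internal_parquet/merged",
        events_per_batch=200000,
        num_workers=4,
    )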

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/data/writers/sqlite_writer.html b/_modules/graphnet/data/writers/sqlite_writer.html index 6a16d4171..1703f796c 100644 --- a/_modules/graphnet/data/writers/sqlite_writer.html +++ b/_modules/graphnet/data/writers/sqlite_writer.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -350,6 +377,7 @@

    Source code for self, merged_database_name: str = "merged.db", max_table_size: Optional[int] = None, + index_column: str = "event_no", ) -> None: """Initialize `SQLiteWriter`. @@ -363,12 +391,14 @@

    Source code for you have many events, as tables exceeding 400 million rows tend to be noticably slower to query. Defaults to None (All events are put into a single database). + index_column: Name of column that contains event id. """ # Member Variables self._file_extension = ".db" self._merge_dataframes = True self._max_table_size = max_table_size self._database_name = merged_database_name + self._index_column = index_column # Add file extension to database name if forgotten if not self._database_name.endswith(self._file_extension): @@ -409,6 +439,7 @@

    Source code for output_file_path, default_type="FLOAT", integer_primary_key=len(df) <= n_events, + index_column=self._index_column, ) saved_any = True @@ -423,6 +454,7 @@

    Source code for self, files: List[str], output_dir: str, + primary_key_rescue: str = "event_no", ) -> None: """SQLite-specific method for merging output files/databases. @@ -437,6 +469,9 @@

    Source code for you have many events, as tables exceeding 400 million rows tend to be noticably slower to query. Defaults to None (All events are put into a single database.) + primary_key_rescue: The name of the columns on which the primary + key is constructed. This will only be used if it is not + possible to infer the primary key name. """ # Warnings if self._max_table_size: @@ -448,10 +483,10 @@

    Source code for # Set variables self._partition_count = 1 + self._primary_key_rescue = primary_key_rescue # Construct full database path database_path = os.path.join(output_dir, self._database_name) - print(database_path) # Start merging if files are given if len(files) > 0: os.makedirs(output_dir, exist_ok=True) @@ -487,10 +522,11 @@

    Source code for # Merge temporary databases into newly created one for file_count, input_file in tqdm(enumerate(files), colour="green"): - # Extract table names and index column name in database try: tables, primary_key = get_primary_keys(database=input_file) + if primary_key is None: + primary_key = self._primary_key_rescue except AssertionError as e: if "No tables found in database." in str(e): self.warning(f"Database {input_file} is empty. Skipping.") @@ -585,7 +621,7 @@
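A minimal usage sketch of the merging path above, including the new rescue column (paths hypothetical):

    writer = SQLiteWriter(merged_database_name="merged.db", max_table_size=None)
    writer.merge_files(
        files=["/tmp/out/file_0.db", "/tmp/out/file_1.db"],
        output_dir="/tmp/out/merged",
        primary_key_rescue="event_no",  # used only if no primary key can be inferred
    )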

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/datasets/prometheus_datasets.html b/_modules/graphnet/datasets/prometheus_datasets.html new file mode 100644 index 000000000..fb26e78fa --- /dev/null +++ b/_modules/graphnet/datasets/prometheus_datasets.html @@ -0,0 +1,543 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.datasets.prometheus_datasets — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.datasets.prometheus_datasets

    +"""Public datasets from Prometheus Simulation."""
    +from typing import Dict, Any, List, Tuple, Union
    +import os
    +from sklearn.model_selection import train_test_split
    +from glob import glob
    +import numpy as np
    +
    +from graphnet.training.labels import Direction, Track
    +from graphnet.data import ERDAHostedDataset
    +from graphnet.data.constants import FEATURES
    +from graphnet.data.utilities import query_database
    +
    +
    +
    +[docs] +class PublicPrometheusDataset(ERDAHostedDataset): + """A generic class for public Prometheus Datasets hosted using ERDA.""" + + # Static Member Variables: + _pulsemaps = ["photons"] + _truth_table = "mc_truth" + _event_truth = [ + "interaction", + "initial_state_energy", + "initial_state_type", + "initial_state_zenith", + "initial_state_azimuth", + "initial_state_x", + "initial_state_y", + "initial_state_z", + ] + _pulse_truth = None + _features = FEATURES.PROMETHEUS + + def _prepare_args( + self, backend: str, features: List[str], truth: List[str] + ) -> Tuple[Dict[str, Any], Union[List[int], None], Union[List[int], None]]: + """Prepare arguments for dataset. + + Args: + backend: backend of dataset. Either "parquet" or "sqlite". + features: List of features from user to use as input. + truth: List of event-level truth variables from user. + + Returns: Dataset arguments, train/val selection, test selection + """ + if backend == "sqlite": + dataset_paths = glob(os.path.join(self.dataset_dir, "*.db")) + assert len(dataset_paths) == 1 + dataset_path = dataset_paths[0] + event_nos = query_database( + database=dataset_path, + query=f"SELECT event_no FROM {self._truth_table}", + ) + train_val, test = train_test_split( + event_nos["event_no"].tolist(), + test_size=0.10, + random_state=42, + shuffle=True, + ) + elif backend == "parquet": + dataset_path = self.dataset_dir + n_batches = len( + glob( + os.path.join(dataset_path, self._truth_table, "*.parquet") + ) + ) + train_val, test = train_test_split( + np.arange(0, n_batches), + test_size=0.10, + random_state=42, + shuffle=True, + ) + dataset_args = { + "truth_table": self._truth_table, + "pulsemaps": self._pulsemaps, + "path": dataset_path, + "graph_definition": self._graph_definition, + "features": features, + "truth": truth, + "labels": { + "direction": Direction( + azimuth_key="initial_state_azimuth", + zenith_key="initial_state_zenith", + ), + "track": Track( + pid_key="initial_state_type", interaction_key="interaction" + ), + }, + } + + return dataset_args, train_val, test
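A sketch of how a concrete subclass might be instantiated; the constructor arguments follow the `CuratedDataset`/`ERDAHostedDataset` interface assumed here, and the graph definition is purely illustrative:

    from graphnet.models.graphs import KNNGraph
    from graphnet.models.detector.prometheus import Prometheus

    dataset = TRIDENTSmall(  # defined below
        graph_definition=KNNGraph(detector=Prometheus()),
        download_dir="/data/curated",  # hypothetical cache directory
        backend="sqlite",
    )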
    + + + +
+[docs] +class TRIDENTSmall(PublicPrometheusDataset): + """Public Dataset for Prometheus simulation of a TRIDENT geometry. + + Contains ~1 million track events with energies between 10 GeV and 10 TeV. + """ + + _experiment = "TRIDENT Prometheus Simulation" + _creator = "Rasmus F. Ørsøe" + _comments = ( + "Contains ~1 million track events." + " Simulation produced by Stephan Meighen-Berger, " + "U. Melbourne." + ) + _available_backends = ["sqlite"] + _file_hashes = {"sqlite": "aooZEpVsAM"} + _citation = None
    + + + +
+[docs] +class PONESmall(PublicPrometheusDataset): + """Public Dataset for Prometheus simulation of a P-ONE geometry. + + Contains ~1 million track events with energies between 10 GeV and 10 TeV. + """ + + _experiment = "P-ONE Prometheus Simulation" + _creator = "Rasmus F. Ørsøe" + _comments = ( + "Contains ~1 million track events." + " Simulation produced by Stephan Meighen-Berger, " + "U. Melbourne." + ) + _available_backends = ["sqlite"] + _file_hashes = {"sqlite": "GIt0hlG9qI"} + _citation = None
    + + + +
+[docs] +class BaikalGVDSmall(PublicPrometheusDataset): + """Public Dataset for Prometheus simulation of a Baikal-GVD geometry. + + Contains ~1 million track events with energies between 10 GeV and 10 TeV. + """ + + _experiment = "Baikal-GVD Prometheus Simulation" + _creator = "Rasmus F. Ørsøe" + _comments = ( + "Contains ~1 million track events." + " Simulation produced by Stephan Meighen-Berger, " + "U. Melbourne." + ) + _available_backends = ["sqlite"] + _file_hashes = {"sqlite": "FtFs5fxXB7"} + _citation = None
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/datasets/test_dataset.html b/_modules/graphnet/datasets/test_dataset.html new file mode 100644 index 000000000..42ae61be0 --- /dev/null +++ b/_modules/graphnet/datasets/test_dataset.html @@ -0,0 +1,454 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.datasets.test_dataset — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.datasets.test_dataset

    +"""A CuratedDataset for unit tests."""
    +from typing import Dict, Any, List, Tuple, Union
    +import os
    +
    +from graphnet.data import ERDAHostedDataset
    +from graphnet.data.constants import FEATURES
    +
    +
    +
+[docs] +class TestDataset(ERDAHostedDataset): + """A CuratedDataset class for unit tests of ERDAHosted Datasets. + + This dataset should not be used outside the context of unit tests. + """ + + # Static Member Variables: + _pulsemaps = ["photons"] + _truth_table = "mc_truth" + _event_truth = [ + "interaction", + "initial_state_energy", + "initial_state_type", + "initial_state_zenith", + "initial_state_azimuth", + "initial_state_x", + "initial_state_y", + "initial_state_z", + ] + _pulse_truth = None + _features = FEATURES.PROMETHEUS + _experiment = "ARCA Prometheus Simulation" + _creator = "Rasmus F. Ørsøe" + _comments = ( + "This Dataset should be used for unit tests only." + " Simulation produced by Stephan Meighen-Berger, " + "U. Melbourne." + ) + _available_backends = ["sqlite"] + _file_hashes = {"sqlite": "EK3hSNgYr5"} + _citation = None + + def _prepare_args( + self, backend: str, features: List[str], truth: List[str] + ) -> Tuple[Dict[str, Any], Union[List[int], None], Union[List[int], None]]: + """Prepare arguments for dataset. + + Args: + backend: backend of dataset. Either "parquet" or "sqlite". + features: List of features from user to use as input. + truth: List of event-level truth from user. + + Returns: Dataset arguments and selections. + """ + dataset_path = os.path.join(self.dataset_dir, "merged.db") + + dataset_args = { + "truth_table": self._truth_table, + "pulsemaps": self._pulsemaps, + "path": dataset_path, + "graph_definition": self._graph_definition, + "features": features, + "truth": truth, + } + selection = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] # event 5 is empty + return dataset_args, selection, None
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/deployment/deployer.html b/_modules/graphnet/deployment/deployer.html index ea737ce89..a861b606f 100644 --- a/_modules/graphnet/deployment/deployer.html +++ b/_modules/graphnet/deployment/deployer.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -483,7 +510,7 @@

    Source code for graphne Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/deployment/deployment_module.html b/_modules/graphnet/deployment/deployment_module.html index d035ff90b..64cc88236 100644 --- a/_modules/graphnet/deployment/deployment_module.html +++ b/_modules/graphnet/deployment/deployment_module.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -449,7 +476,7 @@

    Source code fo Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/deployment/icecube/cleaning_module.html b/_modules/graphnet/deployment/icecube/cleaning_module.html index 90cb9682f..01270e793 100644 --- a/_modules/graphnet/deployment/icecube/cleaning_module.html +++ b/_modules/graphnet/deployment/icecube/cleaning_module.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -577,7 +604,7 @@

    Source c Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/deployment/icecube/inference_module.html b/_modules/graphnet/deployment/icecube/inference_module.html index a43c935a0..62afac462 100644 --- a/_modules/graphnet/deployment/icecube/inference_module.html +++ b/_modules/graphnet/deployment/icecube/inference_module.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -554,7 +581,7 @@

    Source Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/exceptions/exceptions.html b/_modules/graphnet/exceptions/exceptions.html new file mode 100644 index 000000000..ba18b5143 --- /dev/null +++ b/_modules/graphnet/exceptions/exceptions.html @@ -0,0 +1,395 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.exceptions.exceptions — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.exceptions.exceptions

    +"""Module containing GraphNeT-specific Exceptions."""
    +
    +
    +
    +[docs] +class ColumnMissingException(Exception): + """Exception to indicate a missing column in a dataset."""
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/models/coarsening.html b/_modules/graphnet/models/coarsening.html index 301d35d73..ee7357e9b 100644 --- a/_modules/graphnet/models/coarsening.html +++ b/_modules/graphnet/models/coarsening.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -694,7 +721,7 @@

    Source code for graphnet. Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/components/embedding.html b/_modules/graphnet/models/components/embedding.html index 9e280dde3..f73cb60b1 100644 --- a/_modules/graphnet/models/components/embedding.html +++ b/_modules/graphnet/models/components/embedding.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -327,6 +354,8 @@

    Source code for import torch.nn as nn from torch.functional import Tensor +from typing import Optional + from pytorch_lightning import LightningModule @@ -385,34 +414,60 @@

This module incorporates sinusoidal positional embeddings and auxiliary embeddings to process input sequences and produce meaningful - representations. + representations. The module assumes that the input data is in the format + (x, y, z, time, charge, auxiliary), with the first four features being + mandatory. """ def __init__( self, seq_length: int = 128, + mlp_dim: Optional[int] = None, output_dim: int = 384, scaled: bool = False, + n_features: int = 6, ): """Construct `FourierEncoder`. Args: seq_length: Dimensionality of the base sinusoidal positional embeddings. - output_dim: Output dimensionality of the final projection. + mlp_dim (Optional): Size of the hidden latent space of the MLP. If + not given, `mlp_dim` is set automatically as a multiple of + `seq_length` (consistent with the 2nd-place solution), + depending on `n_features`. + output_dim: Dimension of the output (i.e. number of columns). scaled: Whether or not to scale the embeddings. + n_features: The number of features in the input data. """ super().__init__() + self.sin_emb = SinusoidalPosEmb(dim=seq_length, scaled=scaled) self.aux_emb = nn.Embedding(2, seq_length // 2) self.sin_emb2 = SinusoidalPosEmb(dim=seq_length // 2, scaled=scaled) - self.projection = nn.Sequential( - nn.Linear(6 * seq_length, 6 * seq_length), - nn.LayerNorm(6 * seq_length), + + if n_features < 4: + raise ValueError( + f"At least x, y, z and time of the DOM are required. Got only " + f"{n_features} features." + ) + elif n_features >= 6: + hidden_dim = 6 * seq_length + else: + hidden_dim = int((n_features + 0.5) * seq_length) + + if mlp_dim is None: + mlp_dim = hidden_dim + + self.mlp = nn.Sequential( + nn.Linear(hidden_dim, mlp_dim), + nn.LayerNorm(mlp_dim), nn.GELU(), - nn.Linear(6 * seq_length, output_dim), + nn.Linear(mlp_dim, output_dim), ) + self.n_features = n_features +
    [docs] def forward( @@ -422,19 +477,23 @@

    Source code for ) -> Tensor: """Forward pass.""" length = torch.log10(seq_length.to(dtype=x.dtype)) - x = torch.cat( - [ - self.sin_emb(4096 * x[:, :, :3]).flatten(-2), # pos - self.sin_emb(1024 * x[:, :, 4]), # charge - self.sin_emb(4096 * x[:, :, 3]), # time - self.aux_emb(x[:, :, 5].long()), # auxiliary - self.sin_emb2(length) - .unsqueeze(1) - .expand(-1, max(seq_length), -1), - ], - -1, - ) - x = self.projection(x) + embeddings = [self.sin_emb(4096 * x[:, :, :3]).flatten(-2)] # Position + + if self.n_features >= 5: + embeddings.append(self.sin_emb(1024 * x[:, :, 4])) # Charge + + embeddings.append(self.sin_emb(4096 * x[:, :, 3])) # Time + + if self.n_features >= 6: + embeddings.append(self.aux_emb(x[:, :, 5].long())) # Auxiliary + + embeddings.append( + self.sin_emb2(length).unsqueeze(1).expand(-1, max(seq_length), -1) + ) # Length + + x = torch.cat(embeddings, -1) + x = self.mlp(x) + return x
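A minimal shape-check sketch of the refactored encoder, assuming `forward(x, seq_length)` as in the body above: with `n_features=4` the hidden width becomes `(4 + 0.5) * seq_length = 576`, and the output keeps the requested `output_dim`.

    import torch

    enc = FourierEncoder(seq_length=128, output_dim=384, n_features=4)
    x = torch.rand(2, 16, 4)          # (batch, pulses, features: x, y, z, time)
    lengths = torch.tensor([16, 16])  # pulses per event
    out = enc(x, lengths)             # -> torch.Size([2, 16, 384])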

    @@ -507,7 +566,7 @@

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/components/layers.html b/_modules/graphnet/models/components/layers.html index 02ee5c87f..8a42cce29 100644 --- a/_modules/graphnet/models/components/layers.html +++ b/_modules/graphnet/models/components/layers.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -999,7 +1026,7 @@

    Source code for gr Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/components/pool.html b/_modules/graphnet/models/components/pool.html index 11ce78f3d..ce921b2ce 100644 --- a/_modules/graphnet/models/components/pool.html +++ b/_modules/graphnet/models/components/pool.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -642,7 +669,7 @@

    Source code for grap Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/detector/detector.html b/_modules/graphnet/models/detector/detector.html index be4d6bf1d..2cc389fd0 100644 --- a/_modules/graphnet/models/detector/detector.html +++ b/_modules/graphnet/models/detector/detector.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -436,7 +463,7 @@

    Source code for gr Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/detector/icecube.html b/_modules/graphnet/models/detector/icecube.html index 67974ea57..b43da6a7e 100644 --- a/_modules/graphnet/models/detector/icecube.html +++ b/_modules/graphnet/models/detector/icecube.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -383,6 +410,13 @@

    Source code for gra class IceCubeKaggle(Detector): """`Detector` class for Kaggle Competition.""" + geometry_table_path = os.path.join( + ICECUBE_GEOMETRY_TABLE_DIR, "icecube86.parquet" + ) + xyz = ["x", "y", "z"] + string_id_column = "string" + sensor_id_column = "sensor_id" +
    [docs] def feature_map(self) -> Dict[str, Callable]: @@ -533,7 +567,7 @@

    Source code for gra

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/detector/liquido.html b/_modules/graphnet/models/detector/liquido.html new file mode 100644 index 000000000..2688671f0 --- /dev/null +++ b/_modules/graphnet/models/detector/liquido.html @@ -0,0 +1,428 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + graphnet.models.detector.liquido — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +

    Source code for graphnet.models.detector.liquido

    +"""LiquidO-specific `Detector` class(es)."""
    +
    +from typing import Dict, Callable
    +import torch
    +import os
    +
    +from graphnet.models.detector.detector import Detector
    +from graphnet.constants import LIQUIDO_GEOMETRY_TABLE_DIR
    +
    +
    +
    +[docs] +class LiquidO_v1(Detector): + """`Detector` class for LiquidO prototype.""" + + geometry_table_path = os.path.join( + LIQUIDO_GEOMETRY_TABLE_DIR, "liquido_v1.parquet" + ) + xyz = ["sipm_x", "sipm_y", "sipm_z"] + string_id_column = "fiber_id" + sensor_id_column = "sipm_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sipm_x": self._sipm_xyz, + "sipm_y": self._sipm_xyz, + "sipm_z": self._sipm_xyz, + "t": self._t, + } + return feature_map
    + + + def _sipm_xyz(self, x: torch.tensor) -> torch.tensor: + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 500
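The feature map above simply rescales raw columns onto order-unity inputs; a small sketch:

    import torch

    det = LiquidO_v1()
    fm = det.feature_map()
    fm["sipm_x"](torch.tensor([1500.0]))  # -> tensor([1.5000]); positions are divided by 1000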
    + + + + \ No newline at end of file diff --git a/_modules/graphnet/models/detector/prometheus.html b/_modules/graphnet/models/detector/prometheus.html index 3674c8065..3903aa155 100644 --- a/_modules/graphnet/models/detector/prometheus.html +++ b/_modules/graphnet/models/detector/prometheus.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • @@ -332,13 +359,265 @@

    Source code for from graphnet.constants import PROMETHEUS_GEOMETRY_TABLE_DIR +
    +[docs] +class ORCA150SuperDense(Detector): + """`Detector` class for Prometheus ORCA150SuperDense.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "orca_150.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 100 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return (x + 350) / 100 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class TRIDENT1211(Detector): + """`Detector` class for Prometheus TRIDENT1211.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "trident.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 1900 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 3000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class IceCubeUpgrade7(Detector): + """`Detector` class for Prometheus IceCubeUpgrade7.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "icecube_upgrade.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 10 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 2000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class WaterDemo81(Detector): + """`Detector` class for Prometheus WaterDemo81.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "demo_water.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 500 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 2000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class BaikalGVD8(Detector): + """`Detector` class for Prometheus BaikalGVD8.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "gvd.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 10 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class IceDemo81(Detector): + """`Detector` class for Prometheus IceDemo81.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "demo_ice.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 500 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 3000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class ARCA115(Detector): + """`Detector` class for Prometheus ARCA115.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "arca.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 100 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + +
    [docs] class ORCA150(Detector): - """`Detector` class for Prometheus prototype.""" + """`Detector` class for Prometheus ORCA150.""" geometry_table_path = os.path.join( - PROMETHEUS_GEOMETRY_TABLE_DIR, "orca_150.parquet" + PROMETHEUS_GEOMETRY_TABLE_DIR, "orca.parquet" ) xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] string_id_column = "sensor_string_id" @@ -357,11 +636,149 @@

    Source code for return feature_map

    + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 10 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 100 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04 + + + +
    +[docs] +class IceCube86Prometheus(Detector): + """`Detector` class for Prometheus IceCube86.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "icecube86.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: return x / 100 def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: - return (x + 350) / 100 + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class IceCubeDeepCore8(Detector): + """`Detector` class for Prometheus IceCubeDeepCore8.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "icecube_deepcore.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xy, + "sensor_pos_y": self._sensor_pos_xy, + "sensor_pos_z": self._sensor_pos_z, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xy(self, x: torch.tensor) -> torch.tensor: + return x / 100 + + def _sensor_pos_z(self, x: torch.tensor) -> torch.tensor: + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class IceCubeGen2(Detector): + """`Detector` class for Prometheus IceCubeGen2.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "icecube_gen2.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xyz, + "sensor_pos_y": self._sensor_pos_xyz, + "sensor_pos_z": self._sensor_pos_xyz, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xyz(self, x: torch.tensor) -> torch.tensor: + return x / 1000 + + def _t(self, x: torch.tensor) -> torch.tensor: + return x / 1.05e04
    + + + +
    +[docs] +class PONETriangle(Detector): + """`Detector` class for Prometheus PONE Triangle.""" + + geometry_table_path = os.path.join( + PROMETHEUS_GEOMETRY_TABLE_DIR, "pone_triangle.parquet" + ) + xyz = ["sensor_pos_x", "sensor_pos_y", "sensor_pos_z"] + string_id_column = "sensor_string_id" + sensor_id_column = "sensor_id" + +
    +[docs] + def feature_map(self) -> Dict[str, Callable]: + """Map standardization functions to each dimension.""" + feature_map = { + "sensor_pos_x": self._sensor_pos_xyz, + "sensor_pos_y": self._sensor_pos_xyz, + "sensor_pos_z": self._sensor_pos_xyz, + "t": self._t, + } + return feature_map
    + + + def _sensor_pos_xyz(self, x: torch.tensor) -> torch.tensor: + return x / 100 def _t(self, x: torch.tensor) -> torch.tensor: return x / 1.05e04
    @@ -370,8 +787,8 @@

    Source code for
[docs]
-class Prometheus(ORCA150):
-    """Reference to ORCA150."""
+class Prometheus(ORCA150SuperDense):
+    """Reference to ORCA150SuperDense."""
@@ -397,7 +814,7 @@

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/RNN_tito.html b/_modules/graphnet/models/gnn/RNN_tito.html index 155e0f2e8..31dd6c280 100644 --- a/_modules/graphnet/models/gnn/RNN_tito.html +++ b/_modules/graphnet/models/gnn/RNN_tito.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -482,7 +509,7 @@

    Source code for graphne Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/convnet.html b/_modules/graphnet/models/gnn/convnet.html index 9c1b3f2f7..ed02b94df 100644 --- a/_modules/graphnet/models/gnn/convnet.html +++ b/_modules/graphnet/models/gnn/convnet.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -470,7 +497,7 @@

    Source code for graphnet Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/dynedge.html b/_modules/graphnet/models/gnn/dynedge.html index d939a00c7..deacc6246 100644 --- a/_modules/graphnet/models/gnn/dynedge.html +++ b/_modules/graphnet/models/gnn/dynedge.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -358,7 +385,7 @@

         readout_layer_sizes: Optional[List[int]] = None,
         global_pooling_schemes: Optional[Union[str, List[str]]] = None,
         add_global_variables_after_pooling: bool = False,
-        activation_layer: Callable = None,
+        activation_layer: Optional[str] = None,
         add_norm_layer: bool = False,
         skip_readout: bool = False,
     ):
@@ -483,8 +510,14 @@

             add_global_variables_after_pooling
         )

-        if activation_layer is None:
+        if activation_layer is None or activation_layer.lower() == "relu":
             activation_layer = torch.nn.ReLU()
+        elif activation_layer.lower() == "gelu":
+            activation_layer = torch.nn.GELU()
+        else:
+            raise ValueError(
+                f"Activation layer {activation_layer} not supported."
+            )

         # Base class constructor
         super().__init__(nb_inputs, self._readout_layer_sizes[-1])
@@ -695,7 +728,7 @@
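With this change `DynEdge` accepts `activation_layer` as a string ("relu" or "gelu"; `None` falls back to ReLU, anything else raises `ValueError`) rather than a callable. A construction sketch with illustrative argument values:

    from graphnet.models.gnn.dynedge import DynEdge

    # "relu" and None resolve to torch.nn.ReLU(); "gelu" to torch.nn.GELU().
    model = DynEdge(
        nb_inputs=7,  # assumed number of input node features
        global_pooling_schemes=["min", "max", "mean"],
        activation_layer="gelu",
        add_norm_layer=True,
    )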

    Source code for graphnet Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/dynedge_jinst.html b/_modules/graphnet/models/gnn/dynedge_jinst.html index 54e3e08fc..3e4beece3 100644 --- a/_modules/graphnet/models/gnn/dynedge_jinst.html +++ b/_modules/graphnet/models/gnn/dynedge_jinst.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -505,7 +532,7 @@

    Source code for gr Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/dynedge_kaggle_tito.html b/_modules/graphnet/models/gnn/dynedge_kaggle_tito.html index 61998b585..29744d2af 100644 --- a/_modules/graphnet/models/gnn/dynedge_kaggle_tito.html +++ b/_modules/graphnet/models/gnn/dynedge_kaggle_tito.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -630,7 +657,7 @@

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/gnn.html b/_modules/graphnet/models/gnn/gnn.html index 4f0a11b6d..11acff46b 100644 --- a/_modules/graphnet/models/gnn/gnn.html +++ b/_modules/graphnet/models/gnn/gnn.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -387,7 +414,7 @@

    Source code for graphnet.mod Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/gnn/icemix.html b/_modules/graphnet/models/gnn/icemix.html index d59d697b9..d65297300 100644 --- a/_modules/graphnet/models/gnn/icemix.html +++ b/_modules/graphnet/models/gnn/icemix.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -333,7 +360,7 @@

    Source code for graphnet. """ import torch import torch.nn as nn -from typing import Set, Dict, Any, List +from typing import Set, Dict, Any from graphnet.models.components.layers import ( Block_rel, @@ -360,7 +387,8 @@

     def __init__(
         self,
         hidden_dim: int = 384,
-        seq_length: int = 128,
+        mlp_ratio: int = 4,
+        seq_length: int = 192,
         depth: int = 12,
         head_size: int = 32,
         depth_rel: int = 4,
@@ -368,11 +396,13 @@

         scaled_emb: bool = False,
         include_dynedge: bool = False,
         dynedge_args: Dict[str, Any] = None,
+        n_features: int = 6,
     ):
         """Construct `DeepIce`.

         Args:
             hidden_dim: The latent feature dimension.
+            mlp_ratio: Mlp expansion ratio of FourierEncoder and Transformer.
             seq_length: The base feature dimension.
             depth: The depth of the transformer.
             head_size: The size of the attention heads.
@@ -385,11 +415,16 @@

                 provided, DynEdge will be initialized with the original
                 Kaggle Competition settings. If `include_dynedge` is False,
                 this argument has no impact.
+            n_features: The number of features in the input data.
         """
         super().__init__(seq_length, hidden_dim)
         fourier_out_dim = hidden_dim // 2 if include_dynedge else hidden_dim
         self.fourier_ext = FourierEncoder(
-            seq_length, fourier_out_dim, scaled=scaled_emb
+            seq_length=seq_length,
+            mlp_dim=None,
+            output_dim=fourier_out_dim,
+            scaled=scaled_emb,
+            n_features=n_features,
         )
         self.rel_pos = SpacetimeEncoder(head_size)
         self.sandwich = nn.ModuleList(
@@ -406,7 +441,7 @@

                 Block(
                     input_dim=hidden_dim,
                     num_heads=hidden_dim // head_size,
-                    mlp_ratio=4,
+                    mlp_ratio=mlp_ratio,
                     drop_path=0.0 * (i / (depth - 1)),
                     init_values=1,
                 )
@@ -428,7 +463,7 @@

                     (336, 256),
                 ],
                 global_pooling_schemes=None,
-                activation_layer=nn.GELU(),
+                activation_layer="gelu",
                 add_norm_layer=True,
                 skip_readout=True,
             )
@@ -514,7 +549,7 @@
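Taken together, `DeepIce` now exposes the transformer MLP expansion ratio and the number of input features, forwarding both (plus the base sequence length) to `FourierEncoder` by keyword. A construction sketch, with values matching the new defaults except for `n_features`:

    from graphnet.models.gnn.icemix import DeepIce

    model = DeepIce(
        hidden_dim=384,
        mlp_ratio=4,
        seq_length=192,
        n_features=7,  # e.g. one input column beyond the default 6
        include_dynedge=False,
    )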

    Source code for graphnet. Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/edges/edges.html b/_modules/graphnet/models/graphs/edges/edges.html index c8778c6a0..cb3d3fbe5 100644 --- a/_modules/graphnet/models/graphs/edges/edges.html +++ b/_modules/graphnet/models/graphs/edges/edges.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -545,7 +572,7 @@

    Source code for g Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/edges/minkowski.html b/_modules/graphnet/models/graphs/edges/minkowski.html index eb0721ff9..1c218cda2 100644 --- a/_modules/graphnet/models/graphs/edges/minkowski.html +++ b/_modules/graphnet/models/graphs/edges/minkowski.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -451,7 +478,7 @@

    Source code f Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/graph_definition.html b/_modules/graphnet/models/graphs/graph_definition.html index 141e4a4e4..5b13ae1db 100644 --- a/_modules/graphnet/models/graphs/graph_definition.html +++ b/_modules/graphnet/models/graphs/graph_definition.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -801,7 +828,7 @@

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/graphs.html b/_modules/graphnet/models/graphs/graphs.html index 7e77b2ccc..55767ca84 100644 --- a/_modules/graphnet/models/graphs/graphs.html +++ b/_modules/graphnet/models/graphs/graphs.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -405,7 +432,7 @@

    Source code for graphn Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/nodes/nodes.html b/_modules/graphnet/models/graphs/nodes/nodes.html index e11b5d06f..139545acf 100644 --- a/_modules/graphnet/models/graphs/nodes/nodes.html +++ b/_modules/graphnet/models/graphs/nodes/nodes.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -324,7 +351,7 @@

    Source code for graphnet.models.graphs.nodes.nodes

     """Class(es) for building/connecting graphs."""
     
    -from typing import List, Tuple, Optional, Union
    +from typing import List, Tuple, Optional, Dict
     from abc import abstractmethod
     
     import torch
    @@ -666,7 +693,12 @@ 

         input_feature_names: Optional[List[str]] = None,
         max_pulses: int = 768,
         z_name: str = "dom_z",
-        hlc_name: str = "hlc",
+        hlc_name: Optional[str] = "hlc",
+        add_ice_properties: bool = True,
+        ice_args: Dict[str, Optional[float]] = {
+            "z_offset": None,
+            "z_scaling": None,
+        },
     ) -> None:
         """Construct `IceMixNodes`.
@@ -676,9 +708,11 @@

             max_pulses: Maximum number of pulses to keep in the event.
             z_name: Name of the z-coordinate column.
             hlc_name: Name of the `Hard Local Coincidence Check` column.
+            add_ice_properties: If True, scattering and absorption lengths of
+                ice in IceCube are added to the feature set based on the z
+                coordinate.
+            ice_args: Offset and scaling of the z coordinate in the Detector,
+                to be able to make a similar conversion in the ice data.
         """
-        super().__init__(input_feature_names=input_feature_names)
-
         if input_feature_names is None:
             input_feature_names = [
                 "dom_x",
@@ -690,33 +724,39 @@

    Source code for g "rde", ] - if z_name not in input_feature_names: - raise ValueError( - f"z name {z_name} not found in " - f"input_feature_names {input_feature_names}" - ) + if add_ice_properties: + if z_name not in input_feature_names: + raise ValueError( + f"z name '{z_name}' not found in " + f"input_feature_names {input_feature_names}" + ) + self.all_features = input_feature_names + [ + "scatt_lenght", + "abs_lenght", + ] + self.f_scattering, self.f_absoprtion = ice_transparency(**ice_args) + else: + self.all_features = input_feature_names + + super().__init__(input_feature_names=input_feature_names) + if hlc_name not in input_feature_names: - raise ValueError( - f"hlc name {hlc_name} not found in " - f"input_feature_names {input_feature_names}" + self.warning( + f"hlc name '{hlc_name}' not found in input_feature_names" + f" '{input_feature_names}', subsampling will be random." ) - - self.all_features = input_feature_names + [ - "scatt_lenght", - "abs_lenght", - ] + hlc_name = None self.feature_indexes = { feat: self.all_features.index(feat) for feat in input_feature_names } - self.f_scattering, self.f_absoprtion = ice_transparency() - self.input_feature_names = input_feature_names self.n_features = len(self.all_features) self.max_length = max_pulses self.z_name = z_name self.hlc_name = hlc_name + self.add_ice_properties = add_ice_properties def _define_output_feature_names( self, input_feature_names: List[str] @@ -743,35 +783,47 @@

             ids = torch.arange(event_length)
         else:
             ids = torch.randperm(event_length)
-            auxiliary_n = torch.nonzero(
-                x[:, self.feature_indexes[self.hlc_name]] == 0
-            ).squeeze(1)
-            auxiliary_p = torch.nonzero(
-                x[:, self.feature_indexes[self.hlc_name]] == 1
-            ).squeeze(1)
-            ids_n = ids[auxiliary_n][: min(self.max_length, len(auxiliary_n))]
-            ids_p = ids[auxiliary_p][
-                : min(self.max_length - len(ids_n), len(auxiliary_p))
-            ]
-            ids = torch.cat([ids_n, ids_p]).sort().values
+            if self.hlc_name is not None:
+                auxiliary_n = torch.nonzero(
+                    x[:, self.feature_indexes[self.hlc_name]] == 0
+                ).squeeze(1)
+                auxiliary_p = torch.nonzero(
+                    x[:, self.feature_indexes[self.hlc_name]] == 1
+                ).squeeze(1)
+                ids_n = ids[auxiliary_n][
+                    : min(self.max_length, len(auxiliary_n))
+                ]
+                ids_p = ids[auxiliary_p][
+                    : min(self.max_length - len(ids_n), len(auxiliary_p))
+                ]
+
+                ids = torch.cat([ids_n, ids_p]).sort().values
+            else:
+                ids = ids[: self.max_length]
+
         return ids

     def _construct_nodes(self, x: torch.Tensor) -> Tuple[Data, List[str]]:
         event_length = x.shape[0]
-        x[:, self.feature_indexes[self.hlc_name]] = torch.logical_not(
-            x[:, self.feature_indexes[self.hlc_name]]
-        )  # hlc in kaggle was flipped
+        if self.hlc_name is not None:
+            x[:, self.feature_indexes[self.hlc_name]] = torch.logical_not(
+                x[:, self.feature_indexes[self.hlc_name]]
+            )  # hlc in kaggle was flipped
         ids = self._pulse_sampler(x, event_length)
         event_length = min(self.max_length, event_length)

         graph = torch.zeros([event_length, self.n_features])
-        for idx, feature in enumerate(
-            self.all_features[: self.n_features - 2]
-        ):
+
+        if self.add_ice_properties:
+            graph = self._add_ice_properties(graph, x, ids)
+            non_ice_features = self.all_features[: self.n_features - 2]
+        else:
+            non_ice_features = self.all_features
+
+        for idx, feature in enumerate(non_ice_features):
             graph[:event_length, idx] = x[ids, self.feature_indexes[feature]]

-        graph = self._add_ice_properties(graph, x, ids)  # ice properties
         return Data(x=graph)
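With `add_ice_properties=False`, `IceMixNodes` no longer appends the two IceCube ice columns, and a missing HLC column now only triggers a warning with random pulse subsampling instead of raising. A sketch for non-IceCube data (the feature names are illustrative):

    from graphnet.models.graphs.nodes import IceMixNodes

    # The default hlc_name="hlc" is absent from these features, so the class
    # warns and falls back to random subsampling of up to max_pulses pulses.
    node_definition = IceMixNodes(
        input_feature_names=["sensor_pos_x", "sensor_pos_y", "sensor_pos_z", "t"],
        max_pulses=768,
        z_name="sensor_pos_z",
        add_ice_properties=False,
    )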

    @@ -798,7 +850,7 @@

    Source code for g Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/graphs/utils.html b/_modules/graphnet/models/graphs/utils.html index 18ba17add..99d9f1c47 100644 --- a/_modules/graphnet/models/graphs/utils.html +++ b/_modules/graphnet/models/graphs/utils.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -324,7 +351,7 @@

    Source code for graphnet.models.graphs.utils

     """Utility functions for construction of graphs."""
     
    -from typing import List, Tuple
    +from typing import List, Tuple, Optional, Dict, Union
     import os
     import numpy as np
     import pandas as pd
    @@ -503,13 +530,19 @@ 

    Source code for graphne
[docs]
-def ice_transparency() -> Tuple[interp1d, interp1d]:
+def ice_transparency(
+    z_offset: float = None, z_scaling: float = None
+) -> Tuple[interp1d, interp1d]:
     """Return interpolation functions for optical properties of IceCube.

     NOTE: The resulting interpolation functions assume that the Z-coordinate
     of pulses is scaled as `z = z/500`. Any deviation from this scaling
     method results in inaccurate results.

+    Args:
+        z_offset: Offset to be added to the depth of the DOM.
+        z_scaling: Scaling factor to be applied to the depth of the DOM.
+
     Returns:
         f_scattering: Function that takes a normalized depth and returns the
             corresponding normalized scattering length.
@@ -520,8 +553,11 @@

     df = pd.read_parquet(
         os.path.join(DATA_DIR, "ice_properties/ice_transparency.parquet"),
     )
-    df["z"] = df["depth"] - 1950
-    df["z_norm"] = df["z"] / 500
+
+    z_offset = z_offset or -1950.0
+    z_scaling = z_scaling or 500.0
+
+    df["z_norm"] = (df["depth"] + z_offset) / z_scaling
     df[
         ["scattering_len_norm", "absorption_len_norm"]
     ] = RobustScaler().fit_transform(df[["scattering_len", "absorption_len"]])
@@ -554,7 +590,7 @@
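A usage sketch of the parameterized function; the values shown are the documented defaults, which reproduce the previous `z = (depth - 1950) / 500` behaviour:

    from graphnet.models.graphs.utils import ice_transparency

    f_scattering, f_absorption = ice_transparency(
        z_offset=-1950.0, z_scaling=500.0
    )
    scatt_mid = f_scattering(0.0)  # both returns are interp1d over z_norm

Note that the fallback uses `or`, so passing an explicit `0` for either argument is also treated as "use the default".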

    Source code for graphne

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/model.html b/_modules/graphnet/models/model.html index 4449c04fe..15bde9cce 100644 --- a/_modules/graphnet/models/model.html +++ b/_modules/graphnet/models/model.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -519,7 +546,7 @@

    Source code for graphnet.model

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/rnn/node_rnn.html b/_modules/graphnet/models/rnn/node_rnn.html index e508b91e5..09ef6b1c0 100644 --- a/_modules/graphnet/models/rnn/node_rnn.html +++ b/_modules/graphnet/models/rnn/node_rnn.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -491,7 +518,7 @@

    Source code for graphne Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/standard_averaged_model.html b/_modules/graphnet/models/standard_averaged_model.html index b6bf471e3..56b377034 100644 --- a/_modules/graphnet/models/standard_averaged_model.html +++ b/_modules/graphnet/models/standard_averaged_model.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -518,7 +545,7 @@

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/standard_model.html b/_modules/graphnet/models/standard_model.html index badd936c1..bcf11812f 100644 --- a/_modules/graphnet/models/standard_model.html +++ b/_modules/graphnet/models/standard_model.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -918,7 +945,7 @@

    Source code for graph Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/task/classification.html b/_modules/graphnet/models/task/classification.html index cc251f57b..0ca049558 100644 --- a/_modules/graphnet/models/task/classification.html +++ b/_modules/graphnet/models/task/classification.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -397,7 +424,7 @@

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/task/reconstruction.html b/_modules/graphnet/models/task/reconstruction.html index 381966505..11dcdfe1a 100644 --- a/_modules/graphnet/models/task/reconstruction.html +++ b/_modules/graphnet/models/task/reconstruction.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -618,7 +645,7 @@

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/models/task/task.html b/_modules/graphnet/models/task/task.html index 97f4dce52..526a12357 100644 --- a/_modules/graphnet/models/task/task.html +++ b/_modules/graphnet/models/task/task.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
@@ -830,7 +857,7 @@

Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
and Material for Sphinx
diff --git a/_modules/graphnet/models/transformer/iseecube.html b/_modules/graphnet/models/transformer/iseecube.html
new file mode 100644
index 000000000..f7373af7e
--- /dev/null
+++ b/_modules/graphnet/models/transformer/iseecube.html
@@ -0,0 +1,495 @@
+graphnet.models.transformer.iseecube — graphnet documentation

    Source code for graphnet.models.transformer.iseecube

    +"""Implementation of ISeeCube Transformer architecture used in.
    +
    +https://github.com/ChenLi2049/ISeeCube/
    +"""
    +
    +import torch
    +import torch.nn as nn
    +
    +from graphnet.models.components.embedding import FourierEncoder
    +from graphnet.models.gnn.gnn import GNN
    +from graphnet.models.utils import array_to_sequence
    +
    +from torchscale.architecture.config import EncoderConfig
    +from torchscale.architecture.encoder import Encoder
    +
    +from torch_geometric.data import Data
    +from torch import Tensor
    +
    +
    +
+[docs]
+class ISeeCube(GNN):
+    """ISeeCube model."""
+
+    def __init__(
+        self,
+        hidden_dim: int = 384,
+        seq_length: int = 196,
+        num_layers: int = 16,
+        num_heads: int = 12,
+        mlp_dim: int = 1536,
+        rel_pos_buckets: int = 32,
+        max_rel_pos: int = 256,
+        num_register_tokens: int = 3,
+        scaled_emb: bool = False,
+        n_features: int = 6,
+    ):
+        """Construct `ISeeCube`.
+
+        Args:
+            hidden_dim: The latent feature dimension.
+            seq_length: The number of pulses in a neutrino event.
+            num_layers: The depth of the transformer.
+            num_heads: The number of the attention heads.
+            mlp_dim: The mlp dimension of FourierEncoder and Transformer.
+            rel_pos_buckets: Relative position buckets for relative position
+                bias.
+            max_rel_pos: Maximum relative position for relative position
+                bias.
+            num_register_tokens: The number of register tokens.
+            scaled_emb: Whether to scale the sinusoidal positional
+                embeddings.
+            n_features: The number of features in the input data.
+        """
+        super().__init__(seq_length, hidden_dim)
+        self.fourier_ext = FourierEncoder(
+            seq_length=seq_length,
+            mlp_dim=mlp_dim,
+            output_dim=hidden_dim,
+            scaled=scaled_emb,
+            n_features=n_features,
+        )
+        self.pos_embedding = nn.Parameter(
+            torch.empty(1, seq_length, hidden_dim).normal_(std=0.02),
+            requires_grad=True,
+        )
+
+        self.class_token = nn.Parameter(
+            torch.empty(1, 1, hidden_dim),
+            requires_grad=True,
+        )
+        self.register_tokens = nn.Parameter(
+            torch.empty(1, num_register_tokens, hidden_dim),
+            requires_grad=True,
+        )
+
+        encoder_config = EncoderConfig(
+            encoder_attention_heads=num_heads,
+            encoder_embed_dim=hidden_dim,
+            encoder_ffn_embed_dim=mlp_dim,
+            encoder_layers=num_layers,
+            rel_pos_buckets=rel_pos_buckets,
+            max_rel_pos=max_rel_pos,
+        )
+        self.encoder = Encoder(encoder_config)
+
+        self.layer_norm = nn.LayerNorm(hidden_dim)
+
+[docs]
+    def forward(self, data: Data) -> Tensor:
+        """Apply learnable forward pass."""
+        x, _, _ = array_to_sequence(data.x, data.batch, padding_value=0)
+        x = self.fourier_ext(x)
+        batch_size = x.shape[0]
+
+        x += self.pos_embedding
+
+        batch_class_token = self.class_token.expand(batch_size, -1, -1)
+        batch_register_tokens = self.register_tokens.expand(batch_size, -1, -1)
+        x = torch.cat([batch_class_token, batch_register_tokens, x], dim=1)
+
+        x = self.encoder(src_tokens=None, token_embeddings=x)
+        x = x["encoder_out"]
+
+        x = self.layer_norm(x)
+
+        return x[:, 0]
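A construction sketch for the new class, using the defaults above; note that the optional `torchscale` package must be installed for the `Encoder` backbone:

    from graphnet.models.transformer.iseecube import ISeeCube

    model = ISeeCube(
        hidden_dim=384,
        seq_length=196,
        num_layers=16,
        num_register_tokens=3,
    )
    # `forward` consumes a torch_geometric `Data` batch and returns the
    # class-token embedding, of shape [batch_size, hidden_dim].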
\ No newline at end of file
diff --git a/_modules/graphnet/models/utils.html b/_modules/graphnet/models/utils.html
index 2640186a4..edf92c21b 100644
--- a/_modules/graphnet/models/utils.html
+++ b/_modules/graphnet/models/utils.html
@@ -122,10 +122,9 @@
@@ -281,14 +280,42 @@
@@ -463,7 +490,7 @@

Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
and Material for Sphinx
diff --git a/_modules/graphnet/pisa/fitting.html b/_modules/graphnet/pisa/fitting.html
deleted file mode 100644
index cb80d18d0..000000000
--- a/_modules/graphnet/pisa/fitting.html
+++ /dev/null
@@ -1,1183 +0,0 @@
-graphnet.pisa.fitting — graphnet documentation

    Source code for graphnet.pisa.fitting

    -"""Functions and classes for fitting contours using PISA."""
    -
    -import configparser
    -from contextlib import contextmanager
    -import io
    -import multiprocessing
    -import os
    -import random
    -from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING
    -
    -from configupdater import ConfigUpdater
    -import matplotlib as mpl
    -import matplotlib.pyplot as plt
    -import numpy as np
    -import pandas as pd
    -
    -from graphnet.utilities.imports import has_pisa_package
    -
    -if has_pisa_package() or TYPE_CHECKING:
    -    import pisa  # pyright: reportMissingImports=false
    -    from pisa.core.distribution_maker import DistributionMaker
    -    from pisa.core.pipeline import Pipeline
    -    from pisa.analysis.analysis import Analysis
    -    from pisa import ureg
    -
    -from graphnet.data.utilities import create_table_and_save_to_sql
    -
    -mpl.use("pdf")
    -plt.rc("font", family="serif")
    -
    -
    -
    -[docs] -@contextmanager -def config_updater( - config_path: str, - new_config_path: Optional[str] = None, - dummy_section: str = "temp", -) -> ConfigUpdater: - """Update config files and saves them to file. - - Args: - config_path: Path to original config file. - new_config_path: Path to save updated config file. - dummy_section: Dummy section name to use for config files without - section headers. - - Yields: - ConfigUpdater instance for programatically updating config file. - """ - # Modify original config file is no new config path is provided. - if new_config_path is None: - new_config_path = config_path - - # Load config file - updater = ConfigUpdater() - has_dummy_section = False - try: - updater.read(config_path) - - # If it is missing section headers (e.g., binning.cfg), add a dummy section - # header before reading file contents. - except configparser.MissingSectionHeaderError: - with open(config_path, "r") as configfile: - updater.read_string(f"[{dummy_section}]\n" + configfile.read()) - has_dummy_section = True - - # Expose updater instance in contest (i.e., - # `with config_updater(...) as updater:``). - try: - yield updater - - # Write new config to file - finally: - with open(new_config_path, "w") as configfile: - if has_dummy_section: - # Removing dummy section header if necessary - with io.StringIO() as buffer: - updater.write(buffer) - buffer.seek(0) - lines = buffer.readlines()[1:] - configfile.writelines(lines) - else: - updater.write(configfile)
    - - - -
    -[docs] -class WeightFitter: - """Class for fitting weights using PISA.""" - - def __init__( - self, - database_path: str, - truth_table: str = "truth", - index_column: str = "event_no", - statistical_fit: bool = False, - ) -> None: - """Construct `WeightFitter`.""" - self._database_path = database_path - self._truth_table = truth_table - self._index_column = index_column - self._statistical_fit = statistical_fit - -
    -[docs] - def fit_weights( - self, - config_outdir: str, - weight_name: str = "", - pisa_config_dict: Optional[Dict] = None, - add_to_database: bool = False, - ) -> pd.DataFrame: - """Fit flux weights to each neutrino event in `self._database_path`. - - If `statistical_fit=True`, only statistical effects are accounted for. - If `True`, certain systematic effects are included, but not - hypersurfaces. - - Args: - config_outdir: The output directory in which to store the - configuration. - weight_name: The name of the weight. If `add_to_database=True`, - this will be the name of the table. - pisa_config_dict: The dictionary of PISA configurations. Can be - used to change assumptions regarding the fit. - add_to_database: If `True`, a table will be added to the database - called `weight_name` with two columns: - `[index_column, weight_name]` - - Returns: - A dataframe with columns `[index_column, weight_name]`. - """ - # If its a standard weight - if pisa_config_dict is None: - if not weight_name: - print(weight_name) - weight_name = "pisa_weight_graphnet_standard" - - # If it is a custom weight without name - elif pisa_config_dict is not None: - if not weight_name: - weight_name = "pisa_custom_weight" - - pisa_config_path = self._make_config( - config_outdir, weight_name, pisa_config_dict - ) - - model = Pipeline(pisa_config_path) - - if self._statistical_fit == "True": - # Only free parameters will be [aeff_scale] - corresponding to a statistical fit - free_params = model.params.free.names - for free_param in free_params: - if free_param not in ["aeff_scale"]: - model.params[free_param].is_fixed = True - - # for stage in range(len(model.stages)): - model.stages[-1].apply_mode = "events" - model.stages[-1].calc_mode = "events" - model.run() - - all_data = [] - for container in model.data: - data = pd.DataFrame(container["event_no"], columns=["event_no"]) - data[weight_name] = container["weights"] - all_data.append(data) - results = pd.concat(all_data) - - if add_to_database: - create_table_and_save_to_sql( - results.columns, weight_name, self._database_path - ) - return results.sort_values("event_no").reset_index(drop=True)
    - - - def _make_config( - self, - config_outdir: str, - weight_name: str, - pisa_config_dict: Optional[Dict] = None, - ) -> str: - os.makedirs(config_outdir + "/" + weight_name, exist_ok=True) - if pisa_config_dict is None: - # Run on standard settings - pisa_config_dict = { - "reco_energy": {"num_bins": 8}, - "reco_coszen": {"num_bins": 8}, - "pid": {"bin_edges": [0, 0.5, 1]}, - "true_energy": {"num_bins": 200}, - "true_coszen": {"num_bins": 200}, - "livetime": 10 - * 0.01, # set to 1% of 10 years - correspond to the size of the oscNext burn sample - } - - pisa_config_dict["pipeline"] = self._database_path - pisa_config_dict["post_fix"] = None - pipeline_cfg_path = self._create_configs( - pisa_config_dict, config_outdir + "/" + weight_name - ) - return pipeline_cfg_path - - def _create_configs(self, config_dict: Dict, path: str) -> str: - # Update binning config - root = os.path.realpath( - os.path.join(os.getcwd(), os.path.dirname(__file__)) - ) - if config_dict["post_fix"] is not None: - config_name = "config%s" % config_dict["post_fix"] - else: - # config_dict["post_fix"] = '_pred' - config_name = "config" - - with config_updater( - root - + "/resources/configuration_templates/binning_config_template.cfg", - "%s/binning_%s.cfg" % (path, config_name), - dummy_section="binning", - ) as updater: - updater["binning"][ - "graphnet_dynamic_binning.reco_energy" - ].value = ( - "{'num_bins':%s, 'is_log':True, 'domain':[0.5,55] * units.GeV, 'tex': r'E_{\\rm reco}'}" - % config_dict["reco_energy"]["num_bins"] - ) # noqa: W605 - updater["binning"][ - "graphnet_dynamic_binning.reco_coszen" - ].value = ( - "{'num_bins':%s, 'is_lin':True, 'domain':[-1,1], 'tex':r'\\cos{\\theta}_{\\rm reco}'}" - % config_dict["reco_coszen"]["num_bins"] - ) # noqa: W605 - updater["binning"]["graphnet_dynamic_binning.pid"].value = ( - "{'bin_edges': %s, 'tex':r'{\\rm PID}'}" - % config_dict["pid"]["bin_edges"] - ) # noqa: W605 - updater["binning"]["true_allsky_fine.true_energy"].value = ( - "{'num_bins':%s, 'is_log':True, 'domain':[1,1000] * units.GeV, 'tex': r'E_{\\rm true}'}" - % config_dict["true_energy"]["num_bins"] - ) # noqa: W605 - updater["binning"]["true_allsky_fine.true_coszen"].value = ( - "{'num_bins':%s, 'is_lin':True, 'domain':[-1,1], 'tex':r'\\cos\,\\theta_{Z,{\\rm true}}'}" # noqa: W605 - % config_dict["true_coszen"]["num_bins"] - ) # noqa: W605 - - # Update pipeline config - with config_updater( - root - + "/resources/configuration_templates/pipeline_config_weight_template.cfg", - "%s/pipeline_%s.cfg" % (path, config_name), - ) as updater: - updater["pipeline"].add_before.comment( - "#include %s/binning_%s.cfg as binning" % (path, config_name) - ) - updater["data.sqlite_loader"]["post_fix"].value = config_dict[ - "post_fix" - ] - updater["data.sqlite_loader"]["database"].value = config_dict[ - "pipeline" - ] - if "livetime" in config_dict.keys(): - updater["aeff.aeff"]["param.livetime"].value = ( - "%s * units.common_year" % config_dict["livetime"] - ) - return "%s/pipeline_%s.cfg" % (path, config_name)
    - - - -
    -[docs] -class ContourFitter: - """Class for fitting contours using PISA.""" - - def __init__( - self, - outdir: str, - pipeline_path: str, - post_fix: str = "_pred", - model_name: str = "gnn", - include_retro: bool = True, - statistical_fit: bool = False, - ): - """Construct `ContourFitter`.""" - self._outdir = outdir - self._pipeline_path = pipeline_path - self._post_fix = post_fix - self._model_name = model_name - self._include_retro = include_retro - self._statistical_fit = str(statistical_fit) - self._allowed_contour_types = ["1d", "2d"] - -
    -[docs] - def fit_1d_contour( - self, - run_name: str, - config_dict: Dict, - grid_size: int = 30, - n_workers: int = 1, - theta23_minmax: Tuple[float, float] = (36.0, 54.0), - dm31_minmax: Tuple[float, float] = (2.3, 2.7), - ) -> None: - """Fit 1D contours.""" - self._fit_contours( - config_dict=config_dict, - run_name=run_name, - grid_size=grid_size, - n_workers=n_workers, - theta23_minmax=theta23_minmax, - dm31_minmax=dm31_minmax, - contour_type="1d", - )
    - - -
    -[docs] - def fit_2d_contour( - self, - run_name: str, - config_dict: Dict, - grid_size: int = 30, - n_workers: int = 1, - theta23_minmax: Tuple[float, float] = (36.0, 54.0), - dm31_minmax: Tuple[float, float] = (2.3, 2.7), - ) -> None: - """Fit 2D contours.""" - self._fit_contours( - config_dict=config_dict, - run_name=run_name, - grid_size=grid_size, - n_workers=n_workers, - theta23_minmax=theta23_minmax, - dm31_minmax=dm31_minmax, - contour_type="2d", - )
    - - - def _check_inputs( - self, - contour_type: str, - dm31_minmax: Tuple[float, float], - theta23_minmax: Tuple[float, float], - n_workers: int, - ) -> bool: - """Check whether inputs are as expected.""" - if contour_type.lower() not in self._allowed_contour_types: - print( - "%s not recognized as valid contour type. Only %s is recognized" - % (contour_type, self._allowed_contour_types) - ) - return False - if ( - (len(theta23_minmax) != 2) - or (len(dm31_minmax) != 2) - or (dm31_minmax[0] > dm31_minmax[1]) - or (theta23_minmax[0] > theta23_minmax[1]) - ): - print( - "theta23 or dm31 min max values are not understood. Please provide a list on the form [min, max] for both variables" - ) - return False - if n_workers < 1: - print("found n_workers < 1. n_workers must be positive integers.") - return False - return True - - def _fit_contours( - self, - run_name: str, - config_dict: Dict, - grid_size: int, - n_workers: int, - contour_type: str, - theta23_minmax: Tuple[float, float], - dm31_minmax: Tuple[float, float], - ) -> None: - """Fit contours.""" - inputs_ok = self._check_inputs( - contour_type=contour_type, - dm31_minmax=dm31_minmax, - theta23_minmax=theta23_minmax, - n_workers=n_workers, - ) - if not inputs_ok: - return - - minimizer_cfg = self._get_minimizer_path(config_dict) - cfgs = self._setup_config_files(run_name, config_dict) - theta23_range = np.linspace( - theta23_minmax[0], theta23_minmax[1], grid_size - ) - dm31_range = ( - np.linspace(dm31_minmax[0], dm31_minmax[1], grid_size) * 1e-3 - ) - if contour_type.lower() == "1d": - settings = self._make_1d_settings( - cfgs=cfgs, - grid_size=grid_size, - run_name=run_name, - minimizer_cfg=minimizer_cfg, - theta23_range=theta23_range, - dm31_range=dm31_range, - n_workers=n_workers, - ) - p = multiprocessing.Pool(processes=len(settings)) - _ = p.map_async(self._parallel_fit_1d_contour, settings) - p.close() - p.join() - # self._parallel_fit_1d_contour(settings[0]) - elif contour_type.lower() == "2d": - settings = self._make_2d_settings( - cfgs=cfgs, - grid_size=grid_size, - run_name=run_name, - minimizer_cfg=minimizer_cfg, - theta23_range=theta23_range, - dm31_range=dm31_range, - n_workers=n_workers, - ) - p = multiprocessing.Pool(processes=len(settings)) - _ = p.map_async(self._parallel_fit_2d_contour, settings) - p.close() - p.join() - # self._parallel_fit_2d_contour(settings[0]) - df = self._merge_temporary_files(run_name) - df.to_csv(self._outdir + "/" + run_name + "/merged_results.csv") - - def _merge_temporary_files(self, run_name: str) -> pd.DataFrame: - files = os.listdir(self._outdir + "/" + run_name + "/tmp") - df = pd.concat( - [ - pd.read_csv(f"{self._outdir}/{run_name}/tmp/{file}") - for file in files - ], - ignore_index=True, - ) - return df - - def _parallel_fit_2d_contour(self, settings: List[List[Any]]) -> None: - """Fit 2D contours in parallel. - - Length of settings determines the amount of jobs this worker gets. - Results are saved to temporary .csv-files that are later merged. - - Args: - settings: A list of fitting settings. 
- """ - results = [] - for i in range(len(settings)): - ( - cfg_path, - model_name, - outdir, - theta23_value, - deltam31_value, - id, - run_name, - fix_all, - minimizer_cfg, - ) = settings[i] - minimizer_cfg = pisa.utils.fileio.from_file(minimizer_cfg) - model = DistributionMaker([cfg_path]) - data = model.get_outputs(return_sum=True) - ana = Analysis() - if fix_all == "True": - # Only free parameters will be [parameter, aeff_scale] - corresponding to a statistical fit - free_params = model.params.free.names - for free_param in free_params: - if free_param != "aeff_scale": - if free_param == "theta23": - model.params.theta23.is_fixed = True - model.params.theta23.nominal_value = ( - float(theta23_value) * ureg.degree - ) - elif free_param == "deltam31": - model.params.deltam31.is_fixed = True - model.params.deltam31.nominal_value = ( - float(deltam31_value) * ureg.electron_volt**2 - ) - else: - model.params[free_param].is_fixed = True - else: - # Only fixed parameters will be [parameter] - model.params.theta23.is_fixed = True - model.params.deltam31.is_fixed = True - model.params.theta23.nominal_value = ( - float(theta23_value) * ureg.degree - ) - model.params.deltam31.nominal_value = ( - float(deltam31_value) * ureg.electron_volt**2 - ) - model.reset_all() - result = ana.fit_hypo( - data, - model, - metric="mod_chi2", - minimizer_settings=minimizer_cfg, - fit_octants_separately=True, - ) - results.append( - [ - theta23_value, - deltam31_value, - result[0]["params"].theta23.value, - result[0]["params"].deltam31.value, - result[0]["metric_val"], - model_name, - id, - result[0]["minimizer_metadata"]["success"], - ] - ) - self._save_temporary_results( - outdir=outdir, run_name=run_name, results=results, id=id - ) - - def _parallel_fit_1d_contour(self, settings: List[List[Any]]) -> None: - """Fit 1D contours in parallel. - - Length of settings determines the amount of jobs this worker gets. - Results are saved to temporary .csv-files that are later merged. - - Args: - settings: A list of fitting settings. 
- """ - results = [] - for i in range(len(settings)): - ( - cfg_path, - model_name, - outdir, - theta23_value, - deltam31_value, - id, - run_name, - parameter, - fix_all, - minimizer_cfg, - ) = settings[i] - minimizer_cfg = pisa.utils.fileio.from_file(minimizer_cfg) - ana = Analysis() - model = DistributionMaker([cfg_path]) - data = model.get_outputs(return_sum=True) - if fix_all == "True": - # Only free parameters will be [parameter, aeff_scale] - corresponding to a statistical fit - free_params = model.params.free.names - for free_param in free_params: - if free_param not in ["aeff_scale", "theta23", "deltam31"]: - model.params[free_param].is_fixed = True - if parameter == "theta23": - model.params.theta23.is_fixed = True - model.params.theta23.nominal_value = ( - float(theta23_value) * ureg.degree - ) - elif parameter == "deltam31": - model.params.deltam31.is_fixed = True - model.params.deltam31.nominal_value = ( - float(deltam31_value) * ureg.electron_volt**2 - ) - else: - # Only fixed parameters will be [parameter] - if parameter == "theta23": - model.params.theta23.is_fixed = True - model.params.theta23.nominal_value = ( - float(theta23_value) * ureg.degree - ) - elif parameter == "deltam31": - model.params.deltam31.is_fixed = True - model.params.deltam31.nominal_value = ( - float(deltam31_value) * ureg.electron_volt**2 - ) - else: - print("parameter not supported: %s" % parameter) - model.reset_all() - result = ana.fit_hypo( - data, - model, - metric="mod_chi2", - minimizer_settings=minimizer_cfg, - fit_octants_separately=True, - ) - results.append( - [ - theta23_value, - deltam31_value, - result[0]["params"].theta23.value, - result[0]["params"].deltam31.value, - result[0]["metric_val"], - model_name, - id, - result[0]["minimizer_metadata"]["success"], - ] - ) - self._save_temporary_results( - outdir=outdir, run_name=run_name, results=results, id=id - ) - - def _save_temporary_results( - self, outdir: str, run_name: str, results: List[List[Any]], id: int - ) -> None: - os.makedirs(outdir + "/" + run_name + "/tmp", exist_ok=True) - results_df = pd.DataFrame( - data=results, - columns=[ - "theta23_fixed", - "dm31_fixed", - "theta23_best_fit", - "dm31_best_fit", - "mod_chi2", - "model", - "id", - "converged", - ], - ) - results_df.to_csv( - outdir + "/" + run_name + "/tmp" + "/tmp_%s.csv" % id - ) - - def _make_2d_settings( - self, - cfgs: Dict, - grid_size: int, - run_name: str, - minimizer_cfg: str, - theta23_range: Tuple[float, float], - dm31_range: Tuple[float, float], - n_workers: int, - ) -> List[np.ndarray]: - settings = [] - count = 0 - for model_name in cfgs.keys(): - for i in range(0, grid_size): - for k in range(0, grid_size): - settings.append( - [ - cfgs[model_name], - model_name, - self._outdir, - theta23_range[i], - dm31_range[k], - count, - run_name, - self._statistical_fit, - minimizer_cfg, - ] - ) - count += 1 - random.shuffle(settings) - return np.array_split(settings, n_workers) - - def _make_1d_settings( - self, - cfgs: Dict, - grid_size: int, - run_name: str, - minimizer_cfg: str, - theta23_range: Tuple[float, float], - dm31_range: Tuple[float, float], - n_workers: int, - ) -> List[np.ndarray]: - settings = [] - count = 0 - for model_name in cfgs.keys(): - for i in range(0, grid_size): - settings.append( - [ - cfgs[model_name], - model_name, - self._outdir, - theta23_range[i], - -1, - count, - run_name, - "theta23", - self._statistical_fit, - minimizer_cfg, - ] - ) - count += 1 - for i in range(0, grid_size): - settings.append( - [ - cfgs[model_name], - 
model_name, - self._outdir, - -1, - dm31_range[i], - count, - run_name, - "deltam31", - self._statistical_fit, - minimizer_cfg, - ] - ) - count += 1 - random.shuffle(settings) - return np.array_split(settings, n_workers) - - def _setup_config_files(self, run_name: str, config_dict: Dict) -> Dict: - cfgs = {} - cfgs[self._model_name] = self._make_configs( - outdir=self._outdir, - post_fix=self._post_fix, - run_name=run_name, - is_retro=False, - pipeline_path=self._pipeline_path, - config_dict=config_dict, - ) - if self._include_retro: - cfgs["retro"] = self._make_configs( - outdir=self._outdir, - post_fix=self._post_fix, - run_name=run_name, - is_retro=True, - pipeline_path=self._pipeline_path, - config_dict=config_dict, - ) - return cfgs - - def _get_minimizer_path(self, config_dict: Optional[Dict]) -> str: - if config_dict is not None and "minimizer_cfg" in config_dict.keys(): - minimizer_cfg = config_dict["minimizer_cfg"] - else: - root = os.path.realpath( - os.path.join(os.getcwd(), os.path.dirname(__file__)) - ) - minimizer_cfg = ( - root + "/resources/minimizer/graphnet_standard.json" - ) - return minimizer_cfg - - def _create_configs(self, config_dict: Dict, path: str) -> str: - # Update binning config - root = os.path.realpath( - os.path.join(os.getcwd(), os.path.dirname(__file__)) - ) - if config_dict["post_fix"] is not None: - config_name = "config%s" % config_dict["post_fix"] - else: - config_name = "config" - - with config_updater( - root - + "/resources/configuration_templates/binning_config_template.cfg", - "%s/binning_%s.cfg" % (path, config_name), - dummy_section="binning", - ) as updater: - updater["binning"][ - "graphnet_dynamic_binning.reco_energy" - ].value = ( - "{'num_bins':%s, 'is_log':True, 'domain':[0.5,55] * units.GeV, 'tex': r'E_{\\rm reco}'}" - % config_dict["reco_energy"]["num_bins"] - ) # noqa: W605 - updater["binning"][ - "graphnet_dynamic_binning.reco_coszen" - ].value = ( - "{'num_bins':%s, 'is_lin':True, 'domain':[-1,1], 'tex':r'\\cos{\\theta}_{\\rm reco}'}" - % config_dict["reco_coszen"]["num_bins"] - ) # noqa: W605 - updater["binning"]["graphnet_dynamic_binning.pid"].value = ( - "{'bin_edges': %s, 'tex':r'{\\rm PID}'}" - % config_dict["pid"]["bin_edges"] - ) # noqa: W605 - updater["binning"]["true_allsky_fine.true_energy"].value = ( - "{'num_bins':%s, 'is_log':True, 'domain':[1,1000] * units.GeV, 'tex': r'E_{\\rm true}'}" - % config_dict["true_energy"]["num_bins"] - ) # noqa: W605 - updater["binning"]["true_allsky_fine.true_coszen"].value = ( - "{'num_bins':%s, 'is_lin':True, 'domain':[-1,1], 'tex':r'\\cos\,\\theta_{Z,{\\rm true}}'}" # noqa: W605 - % config_dict["true_coszen"]["num_bins"] - ) # noqa: W605 - - # Update pipeline config - with config_updater( - root - + "/resources/configuration_templates/pipeline_config_template.cfg", - "%s/pipeline_%s.cfg" % (path, config_name), - ) as updater: - updater["pipeline"].add_before.comment( - "#include %s/binning_%s.cfg as binning" % (path, config_name) - ) - updater["data.sqlite_loader"]["post_fix"].value = config_dict[ - "post_fix" - ] - updater["data.sqlite_loader"]["database"].value = config_dict[ - "pipeline" - ] - if "livetime" in config_dict.keys(): - updater["aeff.aeff"]["param.livetime"].value = ( - "%s * units.common_year" % config_dict["livetime"] - ) - return "%s/pipeline_%s.cfg" % (path, config_name) - - def _make_configs( - self, - outdir: str, - run_name: str, - is_retro: bool, - pipeline_path: str, - post_fix: str = "_pred", - config_dict: Optional[Dict] = None, - ) -> str: - os.makedirs(outdir 
+ "/" + run_name, exist_ok=True) - if config_dict is None: - # Run on standard settings - config_dict = { - "reco_energy": {"num_bins": 8}, - "reco_coszen": {"num_bins": 8}, - "pid": {"bin_edges": [0, 0.5, 1]}, - "true_energy": {"num_bins": 200}, - "true_coszen": {"num_bins": 200}, - "livetime": 10, - } - - config_dict["pipeline"] = pipeline_path - if is_retro: - config_dict["post_fix"] = "_retro" - else: - config_dict["post_fix"] = post_fix - pipeline_cfg_path = self._create_configs( - config_dict, outdir + "/" + run_name - ) - return pipeline_cfg_path
\ No newline at end of file
diff --git a/_modules/graphnet/pisa/plotting.html b/_modules/graphnet/pisa/plotting.html
deleted file mode 100644
index 530683a56..000000000
--- a/_modules/graphnet/pisa/plotting.html
+++ /dev/null
@@ -1,542 +0,0 @@
-graphnet.pisa.plotting — graphnet documentation

    Source code for graphnet.pisa.plotting

    -"""Functions for plotting contours from PISA fits."""
    -
    -from typing import Any, Dict, List, Tuple
    -
    -import matplotlib.pyplot as plt
    -from matplotlib.figure import Figure
    -import numpy as np
    -import pandas as pd
    -
    -
    -
    -[docs] -def read_entry(entry: Dict) -> Tuple[Any, ...]: - """Parse the contents of `entry`.""" - path = entry["path"] - model_name = entry["model"] - try: - label = entry["label"] - except KeyError: - label = path.split("/")[-2] - try: - ls = entry["linestyle"] - except KeyError: - ls = "-" - try: - color = entry["color"] - except KeyError: - color = None - entry_data = pd.read_csv(path) - - return entry_data, model_name, label, ls, color
    - - - -
    -[docs] -def plot_2D_contour( - contour_data: List[Dict], - xlim: Tuple[float, float] = (0.4, 0.6), - ylim: Tuple[float, float] = (2.38 * 1e-3, 2.55 * 1e-3), - chi2_critical_value: float = 4.605, - width: float = 3.176, - height: float = 2.388, -) -> Figure: - """Plot 2D contours from GraphNeT PISA fits. - - Args: - contour_data: List of dictionaries with plotting information. Format is - for each dictionary is: - {'path': path_to_pisa_fit_result, - 'model': 'name_of_my_model_in_fit'}. - One can specify optional fields in the dictionary: "label" - the - legend label, "color" - the color of the contour, "linestyle" - the - style of the contour line. - xlim: Lower and upper bound of x-axis. - ylim: Lower and upper bound of y-axis. - chi2_critical_value: The critical value of the chi2 fits. Defaults to - 4.605 (90% CL). @NOTE: This, and the below, can't both be right. - width: width of figure in inches. - height: height of figure in inches. - - Returns: - The figure with contours. - """ - fig, ax = plt.subplots(figsize=(width, height), constrained_layout=True) - proxy = [] - labels = [] - for entry in contour_data: - entry_data, model_name, label, ls, color = read_entry(entry) - model_idx = entry_data["model"] == model_name - model_data = entry_data.loc[model_idx] - x = pd.unique(model_data.sort_values("theta23_fixed")["theta23_fixed"]) - y = pd.unique(model_data.sort_values("dm31_fixed")["dm31_fixed"]) - z = np.zeros((len(y), len(x))) - for i in range(len(x)): - for k in range(len(y)): - idx = (model_data["theta23_fixed"] == x[i]) & ( - model_data["dm31_fixed"] == y[k] - ) - match = model_data["mod_chi2"][idx] - if len(match) > 0: - if model_data["converged"][idx].values is True: - match = float(match) - else: - match = 10000 # Sets the z value very high to exclude it from contour - else: - match = 10000 # Sets the z value very high to exclude it from contour - z[k, i] = match - - CS = ax.contour( - np.sin(np.deg2rad(x)) ** 2, - y, - z, - levels=[chi2_critical_value], - colors=color, - label=label, - linestyles=ls, - linewidths=2, - ) - # ax.clabel(CS, inline=1, fontsize=10) - proxy.extend( - [plt.Rectangle((0, 0), 1, 1, fc=color) for pc in CS.collections] - ) - if chi2_critical_value == 4.605: - label = label + " 90 $\\%$ CL" - labels.append(label) - plt.legend(proxy, labels, frameon=False, loc="upper right") - plt.xlim(xlim[0], xlim[1]) - plt.ylim(ylim[0], ylim[1]) - plt.xlabel("$\\sin^2(\\theta_{23})$", fontsize=12) - plt.ylabel("$\\Delta m_{31}^2 [eV^2]$", fontsize=12) - plt.ticklabel_format(axis="y", style="sci", scilimits=(0, 0)) - plt.title("Sensitivity (Simplified Analysis)") - return fig
    - - - -
    -[docs] -def plot_1D_contour( - contour_data: List[Dict], - chi2_critical_value: float = 2.706, - width: float = 2 * 3.176, - height: float = 2.388, -) -> Figure: - """Plot 1D contours from GraphNeT PISA fits. - - Args: - contour_data: List of dictionaries with plotting information. Format is - for each dictionary is: - {'path': path_to_pisa_fit_result, - 'model': 'name_of_my_model_in_fit'}. - One can specify optional fields in the dictionary: "label" - the - legend label, "color" - the color of the contour, "linestyle" - the - style of the contour line. - chi2_critical_value: The critical value of the chi2 fits. Defaults to - 2.706 (90% CL). @NOTE: This, and the above, can't both be right. - width: width of figure in inches. - height: height of figure in inches. - - Returns: - The figure with contours. - """ - variables = ["theta23_fixed", "dm31_fixed"] - fig, ax = plt.subplots( - 1, 2, figsize=(width, height), constrained_layout=True - ) - ls = 0 - for entry in contour_data: - entry_data, model_name, label, ls, color = read_entry(entry) - for variable in variables: - model_idx = entry_data["model"] == model_name - padding_idx = entry_data[variable] != -1 - chi2_idx = entry_data["mod_chi2"] < chi2_critical_value - model_data = entry_data.loc[ - (model_idx) & (padding_idx) & (chi2_idx), : - ] - x = model_data.sort_values(variable) - if variable == "theta23_fixed": - ax[0].set_ylabel("$\\chi^2$", fontsize=12) - ax[0].plot( - np.sin(np.deg2rad(x[variable])) ** 2, - x["mod_chi2"], - color=color, - label=label, - ls=ls, - ) - ax[0].set_xlabel("$\\sin(\\theta_{23})^2$", fontsize=12) - elif variable == "dm31_fixed": - ax[1].plot( - x[variable], x["mod_chi2"], color=color, label=label, ls=ls - ) - ax[1].ticklabel_format(axis="x", style="sci", scilimits=(0, 0)) - ax[1].set_xlabel("$\\Delta m_{31}^2 [eV^2]$", fontsize=12) - h = [item.get_text() for item in ax[1].get_yticklabels()] - empty_string_labels = [""] * len(h) - ax[1].set_yticklabels(empty_string_labels) - ax[0].set_ylim(0, chi2_critical_value) - ax[1].set_ylim(0, chi2_critical_value) - ax[0].legend() - return fig
\ No newline at end of file
diff --git a/_modules/graphnet/training/callbacks.html b/_modules/graphnet/training/callbacks.html
index 7bea62b10..223ead861 100644
--- a/_modules/graphnet/training/callbacks.html
+++ b/_modules/graphnet/training/callbacks.html
@@ -122,10 +122,9 @@
@@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -640,7 +667,7 @@

    Source code for graphnet Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/training/labels.html b/_modules/graphnet/training/labels.html index 0da6b1526..5440be6b8 100644 --- a/_modules/graphnet/training/labels.html +++ b/_modules/graphnet/training/labels.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -398,6 +425,45 @@

    Source code for graphnet.tr z = torch.cos(graph[self._zenith_key]).reshape(-1, 1) return torch.cat((x, y, z), dim=1) + + +
+[docs]
+class Track(Label):
+    """Class for producing NuMuCC label.
+
+    Label is set to `1` if the event is a NuMu CC event, else `0`.
+    """
+
+    def __init__(
+        self,
+        key: str = "track",
+        pid_key: str = "pid",
+        interaction_key: str = "interaction_type",
+    ):
+        """Construct `Track` label.
+
+        Args:
+            key: The name of the field in `Data` where the label will be
+                stored. That is, `graph[key] = label`.
+            pid_key: The name of the pre-existing key in `graph` that will
+                be used to access the pdg encoding, used when computing
+                the label.
+            interaction_key: The name of the pre-existing key in `graph` that
+                will be used to access the interaction type (1 denoting CC),
+                used when computing the label.
+        """
+        self._pid_key = pid_key
+        self._int_key = interaction_key
+
+        # Base class constructor
+        super().__init__(key=key)
+
+    def __call__(self, graph: Data) -> torch.tensor:
+        """Compute label for `graph`."""
+        # NOTE: `pid == 14` matches numu only; anti-numu (`pid == -14`)
+        # events are labelled `0` by this definition.
+        label = (graph[self._pid_key] == 14) & (graph[self._int_key] == 1)
+        return label.type(torch.int)
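A minimal usage sketch of the new Track label (not part of the patch; the key names "pid" and "interaction_type" are the defaults above):

import torch
from torch_geometric.data import Data

from graphnet.training.labels import Track

graph = Data(x=torch.rand(10, 4))
graph["pid"] = torch.tensor([14])              # numu
graph["interaction_type"] = torch.tensor([1])  # charged-current

label = Track()
graph["track"] = label(graph)  # -> tensor([1], dtype=torch.int32)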
    + @@ -422,7 +488,7 @@

    Source code for graphnet.tr Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/training/loss_functions.html b/_modules/graphnet/training/loss_functions.html index a5dab2a8f..2b907c806 100644 --- a/_modules/graphnet/training/loss_functions.html +++ b/_modules/graphnet/training/loss_functions.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -842,7 +869,7 @@

    Source code for gra Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/training/utils.html b/_modules/graphnet/training/utils.html index b2e9890bd..249ad1eb7 100644 --- a/_modules/graphnet/training/utils.html +++ b/_modules/graphnet/training/utils.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -506,7 +533,7 @@

    Source code for graphnet.tra truth_table=truth_table, index_column=index_column, ) - elif db.endswith(".parquet"): + else: dataset = ParquetDataset( path=db, graph_definition=graph_definition, @@ -516,10 +543,6 @@

    Source code for graphnet.tra truth_table=truth_table, index_column=index_column, ) - else: - raise RuntimeError( - f"File {db} with format {db.split('.'[-1])} not supported." - ) selection = dataset._get_all_indices() # Perform train/validation split @@ -701,7 +724,7 @@

    Source code for graphnet.tra Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/training/weight_fitting.html b/_modules/graphnet/training/weight_fitting.html index 5c42395a7..6d39cf0a4 100644 --- a/_modules/graphnet/training/weight_fitting.html +++ b/_modules/graphnet/training/weight_fitting.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -564,7 +591,7 @@

    Source code for gra Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/argparse.html b/_modules/graphnet/utilities/argparse.html index f46e09075..05cfe51ff 100644 --- a/_modules/graphnet/utilities/argparse.html +++ b/_modules/graphnet/utilities/argparse.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -515,7 +542,7 @@

    Source code for graphnet Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/base_config.html b/_modules/graphnet/utilities/config/base_config.html index 32d6f50c1..94622f99a 100644 --- a/_modules/graphnet/utilities/config/base_config.html +++ b/_modules/graphnet/utilities/config/base_config.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -435,7 +462,7 @@

    Source code fo Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/configurable.html b/_modules/graphnet/utilities/config/configurable.html index eb58aed73..eb5b50ae7 100644 --- a/_modules/graphnet/utilities/config/configurable.html +++ b/_modules/graphnet/utilities/config/configurable.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -394,7 +421,7 @@

    Source code f Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/dataset_config.html b/_modules/graphnet/utilities/config/dataset_config.html index 743743b2e..654f2c796 100644 --- a/_modules/graphnet/utilities/config/dataset_config.html +++ b/_modules/graphnet/utilities/config/dataset_config.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -623,7 +650,7 @@

    Source code Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/model_config.html b/_modules/graphnet/utilities/config/model_config.html index e990ed616..f70455f80 100644 --- a/_modules/graphnet/utilities/config/model_config.html +++ b/_modules/graphnet/utilities/config/model_config.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -713,7 +740,7 @@

    Source code f Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/parsing.html b/_modules/graphnet/utilities/config/parsing.html index 0bdfbcebc..a900ee833 100644 --- a/_modules/graphnet/utilities/config/parsing.html +++ b/_modules/graphnet/utilities/config/parsing.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -461,7 +488,7 @@

    Source code for gr Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/config/training_config.html b/_modules/graphnet/utilities/config/training_config.html index dda012503..0677ef70d 100644 --- a/_modules/graphnet/utilities/config/training_config.html +++ b/_modules/graphnet/utilities/config/training_config.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -364,7 +391,7 @@

    Source cod Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/deprecation_tools.html b/_modules/graphnet/utilities/deprecation_tools.html index df8e2670d..d455ba74a 100644 --- a/_modules/graphnet/utilities/deprecation_tools.html +++ b/_modules/graphnet/utilities/deprecation_tools.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -382,7 +409,7 @@

    Source code for Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/filesys.html b/_modules/graphnet/utilities/filesys.html index 5a4640792..1cd7374ba 100644 --- a/_modules/graphnet/utilities/filesys.html +++ b/_modules/graphnet/utilities/filesys.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -415,7 +442,8 @@

    Source code for graphnet. folder_i3_files = list(filter(is_i3_file, folder_files)) folder_gcd_files = list(filter(is_gcd_file, folder_files)) - # Make sure that no more than one GCD file is found; and use rescue file of none is found. + # Make sure that no more than one GCD file is found; + # and use rescue file if none is found. assert len(folder_gcd_files) <= 1 if len(folder_gcd_files) == 0: assert gcd_rescue is not None @@ -453,7 +481,7 @@

    Source code for graphnet. Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/imports.html b/_modules/graphnet/utilities/imports.html index 67929efcd..6726c5d70 100644 --- a/_modules/graphnet/utilities/imports.html +++ b/_modules/graphnet/utilities/imports.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -363,22 +390,6 @@

    Source code for graphnet. -
    -[docs] -def has_pisa_package() -> bool: - """Check whether the `pisa` package is available.""" - try: - import pisa # pyright: reportMissingImports=false - - return True - except ImportError: - Logger(log_folder=None).warning_once( - "`pisa` not available. Some functionality may be missing.", - ) - return False
    - - -
    [docs] def requires_icecube(test_function: Callable) -> Callable: @@ -420,7 +431,7 @@

    Source code for graphnet.

    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/logging.html b/_modules/graphnet/utilities/logging.html index f4b14db28..271fbfd42 100644 --- a/_modules/graphnet/utilities/logging.html +++ b/_modules/graphnet/utilities/logging.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -630,7 +657,7 @@

    Source code for graphnet. Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/graphnet/utilities/maths.html b/_modules/graphnet/utilities/maths.html index cff801128..61a5fae36 100644 --- a/_modules/graphnet/utilities/maths.html +++ b/_modules/graphnet/utilities/maths.html @@ -122,10 +122,9 @@ - + - @@ -281,14 +280,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -357,7 +384,7 @@

    Source code for graphnet.ut Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/_modules/index.html b/_modules/index.html index b9ec07420..f33d5ba19 100644 --- a/_modules/index.html +++ b/_modules/index.html @@ -122,10 +122,9 @@ - + - @@ -280,14 +279,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -322,6 +349,7 @@

    All modules for which code is available

    @@ -401,6 +430,8 @@
  • PROMETHEUS
  • KAGGLE +
  • +
  • LIQUIDO
  • @@ -448,6 +479,13 @@ KAGGLE + +
  • + + + LIQUIDO + +
  • @@ -490,10 +528,24 @@ KAGGLE + +
  • + + + LIQUIDO + +
  • + +
  • + + + curated_datamodule + +
  • @@ -522,15 +574,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -542,14 +594,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -603,6 +655,8 @@
  • PROMETHEUS
  • KAGGLE +
  • +
  • LIQUIDO
  • @@ -617,6 +671,8 @@
  • PROMETHEUS
  • KAGGLE +
  • +
  • LIQUIDO
  • @@ -659,6 +715,10 @@
    KAGGLE = ['x', 'y', 'z', 'time', 'charge', 'auxiliary']
    +
    +
    +LIQUIDO = ['sipm_x', 'sipm_y', 'sipm_z', 't']
    +
    @@ -667,15 +727,15 @@

    Namespace for standard names working with I3TruthExtractor.

    -ICECUBE86 = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'sim_type', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    +ICECUBE86 = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    -DEEPCORE = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'sim_type', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    +DEEPCORE = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    -UPGRADE = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'sim_type', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    +UPGRADE = ['energy', 'energy_track', 'energy_cascade', 'position_x', 'position_y', 'position_z', 'azimuth', 'zenith', 'pid', 'elasticity', 'interaction_type', 'interaction_time', 'inelasticity', 'stopped_muon']
    @@ -685,6 +745,10 @@
    KAGGLE = ['zenith', 'azimuth']
    +
    +
    +LIQUIDO = ['vertex_x', 'vertex_y', 'vertex_z', 'zenith', 'azimuth', 'interaction_time', 'energy', 'pid']
    +
    @@ -712,12 +776,12 @@ - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.curated_datamodule.html b/api/graphnet.data.curated_datamodule.html new file mode 100644 index 000000000..fa2532030 --- /dev/null +++ b/api/graphnet.data.curated_datamodule.html @@ -0,0 +1,947 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + curated_datamodule — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    + + +
    +
    + +
    +

    curated_datamodule

    +

Contains a generic class for curated DataModules/Datasets.

    +

Inheriting subclasses are data-specific implementations that allow the user to +import and download pre-converted datasets for training of deep learning based +methods in GraphNeT.

    +
    +
    +class graphnet.data.curated_datamodule.CuratedDataset(graph_definition, download_dir, truth, features, backend, train_dataloader_kwargs, validation_dataloader_kwargs, test_dataloader_kwargs)[source]
    +

    Bases: GraphNeTDataModule

    +

    Generic base class for curated datasets.

    +

Curated Datasets in GraphNeT are pre-converted datasets that have been +prepared for training and evaluation of deep learning models. On these +Datasets, graphnet users can train and benchmark their models against +state-of-the-art methods.

    +

    Construct CuratedDataset.

    +
    +
    Parameters:
    +
      +
    • graph_definition (GraphDefinition) – Method that defines the data representation.

    • +
    • download_dir (str) – Directory to download dataset to.

    • +
    • truth (Optional) – List of event-level truth to include. Will +include all available information if not given.

    • +
    • features (Optional) – List of input features from pulsemap to use. +If not given, all available features will be +used.

    • +
    • backend (Optional) – data backend to use. Either “parquet” or +“sqlite”. Defaults to “parquet”.

    • +
    • train_dataloader_kwargs (Optional) – Arguments for the training +DataLoader. Default None.

    • +
    • validation_dataloader_kwargs (Optional) – Arguments for the +validation DataLoader, Default None.

    • +
    • test_dataloader_kwargs (Optional) – Arguments for the test +DataLoader. Default None.

    • +
    +
    +
    +
    +
    +abstract prepare_data()[source]
    +

    Download and prepare data.

    +
    +
    Return type:
    +

    None

    +
    +
    +
    +
    +
    +description()[source]
    +

    Print details on the Dataset.

    +
    +
    Return type:
    +

    None

    +
    +
    +
    +
    +
    +property pulsemaps: List[str]
    +

    Produce a list of available pulsemaps in Dataset.

    +
    +
    +
    +property truth_table: List[str]
    +

    Produce name of table containing event-level truth in Dataset.

    +
    +
    +
    +property event_truth: List[str]
    +

    Produce a list of available event-level truth in Dataset.

    +
    +
    +
    +property pulse_truth: List[str] | None
    +

    Produce a list of available pulse-level truth in Dataset.

    +
    +
    +
    +property features: List[str]
    +

    Produce a list of available input features in Dataset.

    +
    +
    +
    +property experiment: str
    +

    Produce the name of the experiment that the data comes from.

    +
    +
    +
    +property citation: str
    +

    Produce a string that describes how to cite this Dataset.

    +
    +
    +
    +property comments: str
    +

    Produce comments on the dataset from the creator.

    +
    +
    +
    +property creator: str
    +

    Produce name of person who created the Dataset.

    +
    +
    +
    +property events: Dict[str, int]
    +

Produce a dict that contains the number of events in each selection.

    +
    +
    +
    +property available_backends: List[str]
    +

    Produce a list of available data formats that the data comes in.

    +
    +
    +
    +property dataset_dir: str
    +

Produce the path to the directory that contains the dataset files.

    +
    +
    +
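A hedged usage sketch of a curated dataset (class names are assumptions; any concrete CuratedDataset subclass, e.g. one of the Prometheus datasets added by this deployment, is used the same way):

from graphnet.datasets import TRIDENTSmall  # assumed concrete curated dataset
from graphnet.models.detector.prometheus import Prometheus  # assumed detector class
from graphnet.models.graphs import KNNGraph

dataset = TRIDENTSmall(
    graph_definition=KNNGraph(detector=Prometheus()),
    download_dir="./data",
    train_dataloader_kwargs={"batch_size": 8, "num_workers": 2},
)
dataset.description()                    # print details on the dataset
train_loader = dataset.train_dataloader  # property, see GraphNeTDataModule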
    +
    +class graphnet.data.curated_datamodule.ERDAHostedDataset(graph_definition, download_dir, truth, features, backend, train_dataloader_kwargs, validation_dataloader_kwargs, test_dataloader_kwargs)[source]
    +

    Bases: CuratedDataset

    +

    A base class for dataset/datamodule hosted at ERDA.

    +

Inheriting subclasses will just need to fill out the _file_hashes +attribute, which points to the file-id of an ERDA-hosted sharelink. It +is assumed that each sharelink points to a single file that has been +compressed using tar, with extension “.tar.gz”.

    +

    E.g. suppose that the sharelink below +https://sid.erda.dk/share_redirect/FbEEzAbg5A +points to a compressed sqlite database. Then: +_file_hashes = {‘sqlite’ : “FbEEzAbg5A”}
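A hedged sketch of such a subclass (hypothetical class name; a real subclass would typically also define metadata such as the experiment, citation and available backends):

from graphnet.data.curated_datamodule import ERDAHostedDataset

class MyERDADataset(ERDAHostedDataset):
    """Hypothetical dataset hosted at ERDA as a single .tar.gz file."""

    # Maps each available backend to the file-id of its ERDA sharelink.
    _file_hashes = {"sqlite": "FbEEzAbg5A"}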

    +

    Construct CuratedDataset.

    +
    +
    Parameters:
    +
      +
    • graph_definition (GraphDefinition) – Method that defines the data representation.

    • +
    • download_dir (str) – Directory to download dataset to.

    • +
    • truth (Optional) – List of event-level truth to include. Will +include all available information if not given.

    • +
    • features (Optional) – List of input features from pulsemap to use. +If not given, all available features will be +used.

    • +
    • backend (Optional) – data backend to use. Either “parquet” or +“sqlite”. Defaults to “parquet”.

    • +
    • train_dataloader_kwargs (Optional) – Arguments for the training +DataLoader. Default None.

    • +
    • validation_dataloader_kwargs (Optional) – Arguments for the +validation DataLoader, Default None.

    • +
    • test_dataloader_kwargs (Optional) – Arguments for the test +DataLoader. Default None.

    • +
    +
    +
    +
    +
    +prepare_data()[source]
    +

    Prepare the dataset for training.

    +
    +
    Return type:
    +

    None

    +
    +
    +
    +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.dataclasses.html b/api/graphnet.data.dataclasses.html index 9a0ff4708..2c6708832 100644 --- a/api/graphnet.data.dataclasses.html +++ b/api/graphnet.data.dataclasses.html @@ -123,14 +123,13 @@ - + - - + @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -368,6 +395,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -486,15 +520,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -506,14 +540,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -597,8 +631,8 @@
    Parameters:
      -
    • i3_file (str) –

    • -
    • gcd_file (str) –

    • +
    • i3_file (str)

    • +
    • gcd_file (str)

    @@ -619,10 +653,10 @@
    Parameters:
      -
    • i3_files (List[str]) –

    • -
    • gcd_file (str) –

    • -
    • output_folder (str) –

    • -
    • modules (List[Any]) –

    • +
    • i3_files (List[str])

    • +
    • gcd_file (str)

    • +
    • output_folder (str)

    • +
    • modules (List[Any])

    @@ -655,7 +689,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataloader.html b/api/graphnet.data.dataloader.html index a292c039d..e86d352ef 100644 --- a/api/graphnet.data.dataloader.html +++ b/api/graphnet.data.dataloader.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -368,6 +395,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -448,15 +482,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -468,14 +502,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -552,7 +586,7 @@

    Batch

    Parameters:
    -

    graphs (List[Data]) –

    +

    graphs (List[Data])

    @@ -565,7 +599,7 @@

    bool

    Parameters:
    -

    selection_name (str) –

    +

    selection_name (str)

    @@ -578,14 +612,14 @@
    Parameters:
      -
    • dataset (Dataset[T_co]) –

    • -
    • batch_size (int | None) –

    • -
    • shuffle (bool) –

    • -
    • num_workers (int) –

    • -
    • persistent_workers (bool) –

    • -
    • collate_fn (Callable) –

    • -
    • prefetch_factor (int | None) –

    • -
    • kwargs (Any) –

    • +
    • dataset (Dataset[T_co])

    • +
    • batch_size (int | None)

    • +
    • shuffle (bool)

    • +
    • num_workers (int)

    • +
    • persistent_workers (bool)

    • +
    • collate_fn (Callable)

    • +
    • prefetch_factor (int | None)

    • +
    • kwargs (Any)

    @@ -599,8 +633,8 @@
    Parameters:
    @@ -655,7 +689,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.datamodule.html b/api/graphnet.data.datamodule.html index 4c98ffe66..bd8465a7f 100644 --- a/api/graphnet.data.datamodule.html +++ b/api/graphnet.data.datamodule.html @@ -123,13 +123,12 @@ - + - - + @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -368,6 +395,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -475,15 +509,15 @@
  • + +
  • - pipeline + datasets -
  • -
  • @@ -495,14 +529,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -584,20 +618,21 @@
    Parameters:
      -
    • dataset_reference (Union[SQLiteDataset, ParquetDataset, Dataset]) – A non-instantiated reference +

    • dataset_reference (Union[Type[SQLiteDataset], Type[ParquetDataset], Type[Dataset]]) – A non-instantiated reference to the dataset class.

    • dataset_args (Dict[str, Any]) – Arguments to instantiate graphnet.data.dataset.Dataset with.

    • selection (Union[List[int], List[List[int]], None], default: None) – (Optional) a list of event id’s used for training and validation, Default None.

    • test_selection (Union[List[int], List[List[int]], None], default: None) – (Optional) a list of event id’s used for testing, -Default None.

    • +Defaults to None.

    • train_dataloader_kwargs (Optional[Dict[str, Any]], default: None) – Arguments for the training DataLoader, -Default None.

• +Defaults to {“batch_size”: 2, “num_workers”: 1}.

    • validation_dataloader_kwargs (Optional[Dict[str, Any]], default: None) – Arguments for the validation -DataLoader, Default None.

    • +DataLoader. Defaults to +train_dataloader_kwargs.

    • test_dataloader_kwargs (Optional[Dict[str, Any]], default: None) – Arguments for the test DataLoader, -Default None.

    • +Defaults to train_dataloader_kwargs.

    • train_val_split (Optional) – Split ratio for training and validation sets. Default is [0.9, 0.10].

    • split_seed (int, default: 42) – seed used for shuffling and splitting selections into @@ -630,7 +665,7 @@
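A hedged instantiation sketch of the DataModule (file path, table names and features are placeholders):

from graphnet.data.datamodule import GraphNeTDataModule
from graphnet.data.dataset.sqlite.sqlite_dataset import SQLiteDataset
from graphnet.models.detector.icecube import IceCube86
from graphnet.models.graphs import KNNGraph

dm = GraphNeTDataModule(
    dataset_reference=SQLiteDataset,  # the class itself, not an instance
    dataset_args={
        "path": "/data/events.db",
        "pulsemaps": "SRTTVPulses",
        "truth_table": "truth",
        "features": ["dom_x", "dom_y", "dom_z", "dom_time"],
        "truth": ["energy", "zenith"],
        "graph_definition": KNNGraph(detector=IceCube86()),
    },
    train_dataloader_kwargs={"batch_size": 16, "num_workers": 4},
)
train_loader = dm.train_dataloader  # property, per the docs above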

    -property train_dataloader: DataLoader
    +property train_dataloader: DataLoader

    Prepare and return the training DataLoader.

    Returns:
    @@ -643,7 +678,7 @@
    -property val_dataloader: DataLoader
    +property val_dataloader: DataLoader

    Prepare and return the validation DataLoader.

    Returns:
    @@ -656,7 +691,7 @@
    -property test_dataloader: DataLoader
    +property test_dataloader: DataLoader

    Prepare and return the test DataLoader.

    Returns:
    @@ -709,12 +744,12 @@ - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.dataset.html b/api/graphnet.data.dataset.dataset.html index 3d311fc7b..264998e96 100644 --- a/api/graphnet.data.dataset.dataset.html +++ b/api/graphnet.data.dataset.dataset.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -340,8 +367,6 @@ -
  • @@ -562,14 +587,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -612,8 +637,6 @@
    @@ -696,7 +713,7 @@

    Dict[str, Label]

    Parameters:
    -

    cfg (dict) –

    +

    cfg (dict)

    @@ -752,8 +769,8 @@ events ~ event_no % 5 > 0”).

  • graph_definition (GraphDefinition) – Method that defines the graph representation.

  • labels (Optional[Dict[str, Any]], default: None) – Dictionary of labels to be added to the dataset.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -769,7 +786,7 @@

    Union[Dataset, EnsembleDataset, Dict[str, Dataset], Dict[str, EnsembleDataset]]

    Parameters:
    -

    source (DatasetConfig | str) –

    +

    source (DatasetConfig | str)

    @@ -782,7 +799,7 @@

    EnsembleDataset

    Parameters:
    -

    datasets (List[Dataset]) –

    +

    datasets (List[Dataset])
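A hedged sketch (dataset_a and dataset_b stand for any two instantiated graphnet Datasets):

from graphnet.data.dataset import EnsembleDataset

ensemble = EnsembleDataset([dataset_a, dataset_b])
print(len(ensemble))  # total number of events across both datasets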

    @@ -814,7 +831,7 @@
    Return type:
    -

    List[Tuple[Any, ...]]

    +

    ndarray

    Returns:

    @@ -825,7 +842,7 @@

    Raises:
    -

    ColumnMissingException – If one or more element in columns is not +

ColumnMissingException – If one or more of the elements in columns is not +present in table.

    @@ -840,8 +857,8 @@
    Parameters:
    @@ -908,7 +925,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.html b/api/graphnet.data.dataset.html index 9aa7ec0ad..ea9dde565 100644 --- a/api/graphnet.data.dataset.html +++ b/api/graphnet.data.dataset.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -398,6 +425,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -426,15 +460,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -446,14 +480,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -522,7 +556,6 @@
    Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.parquet.html b/api/graphnet.data.dataset.parquet.html index 19a432558..49717e54e 100644 --- a/api/graphnet.data.dataset.parquet.html +++ b/api/graphnet.data.dataset.parquet.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -406,6 +433,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -434,15 +468,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -454,14 +488,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -571,7 +605,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.parquet.parquet_dataset.html b/api/graphnet.data.dataset.parquet.parquet_dataset.html index 3cff0e856..fc8339df5 100644 --- a/api/graphnet.data.dataset.parquet.parquet_dataset.html +++ b/api/graphnet.data.dataset.parquet.parquet_dataset.html @@ -123,10 +123,9 @@ - + - @@ -287,14 +286,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -432,6 +459,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -460,15 +494,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -480,14 +514,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -549,17 +583,21 @@

    parquet_dataset

    -

    Dataset class(es) for reading from Parquet files.

    +

    Base Dataset class(es) used in GraphNeT.

    class graphnet.data.dataset.parquet.parquet_dataset.ParquetDataset(*args, **kwargs)[source]

    Bases: Dataset

    -

    Pytorch dataset for reading from Parquet files.

    +

    Dataset class for Parquet-files converted with ParquetWriter.

    Construct Dataset.

    +
    +

    NOTE: DataLoaders using this Dataset should have +“multiprocessing_context = ‘spawn’” set to avoid thread locking.
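A hedged sketch of the NOTE above (dataset stands for any constructed ParquetDataset; the keyword is forwarded to the underlying torch DataLoader):

from graphnet.data.dataloader import DataLoader

loader = DataLoader(
    dataset,
    batch_size=16,
    num_workers=4,
    multiprocessing_context="spawn",  # avoid thread locking with this Dataset
)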

    +
    Parameters:
      -
    • path (Union[str, List[str]]) – Path to the file(s) from which this Dataset should read.

    • +
    • path (str) – Path to the file(s) from which this Dataset should read.

    • pulsemaps (Union[str, List[str]]) – Name(s) of the pulse map series that should be used to construct the nodes on the individual graph objects, and their features. Multiple pulse series maps can be used, e.g., when @@ -579,11 +617,8 @@

    • string_selection (Optional[List[int]], default: None) – Subset of strings for which data should be read and used to construct graph objects. Defaults to None, meaning all strings for which data exists are used.

    • -
    • selection (Union[str, List[int], List[List[int]], None], default: None) – The events that should be read. This can be given either -as list of indicies (in index_column); or a string-based -selection used to query the Dataset for events passing the -selection. Defaults to None, meaning that all events in the -input files are read.

    • +
• selection (Union[str, List[int], List[List[int]], None], default: None) – The batch ids to include in the dataset. +Defaults to None, meaning that all batches are read.

    • dtype (dtype, default: torch.float32) – Type of the feature tensor on the graph objects returned.

    • loss_weight_table (Optional[str], default: None) – Name of the table containing per-event loss weights.

    • @@ -601,9 +636,10 @@ “10000 random events ~ event_no % 5 > 0” or “20% random events ~ event_no % 5 > 0”).

    • graph_definition (GraphDefinition) – Method that defines the graph representation.

    • -
    • labels (Optional[Dict[str, Any]], default: None) – Dictionary of labels to be added to the dataset.

    • -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • cache_size (int, default: 1) – Number of batches to cache in memory. +Must be at least 1. Defaults to 1.

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -613,19 +649,35 @@
    query_table(table, columns, sequential_index, selection)[source]
    -

    Query table at a specific index, optionally with some selection.

    +

    Query a table at a specific index, optionally with some selection.

    -
    Return type:
    -

    List[Tuple[Any, ...]]

    -
    -
    Parameters:
    -
      -
    • table (str) –

    • -
    • columns (List[str] | str) –

    • -
    • sequential_index (int | None) –

    • -
    • selection (str | None) –

    • +
      Parameters:
      +
        +
      • table (str) – Table to be queried.

      • +
      • columns (Union[List[str], str]) – Columns to read out.

      • +
      • sequential_index (Optional[int], default: None) – Sequentially numbered index +(i.e. in [0,len(self))) of the event to query. This _may_ +differ from the indexation used in self._indices. If no value +is provided, the entire column is returned.

      • +
      • selection (Optional[str], default: None) – Selection to be imposed before reading out data. +Defaults to None.

      +
      Return type:
      +

      ndarray

      +
      +
      Returns:
      +

      +
Array containing the values in columns. If the table

contains only scalar data for columns, an array of length 1 is +returned

      +
      +
      +

      +
      +
      Raises:
      +

ColumnMissingException – If one or more of the elements in columns is not + present in table.
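A hedged usage sketch (dataset is a constructed ParquetDataset; table and column names are placeholders):

pulses = dataset.query_table(
    table="SRTTVPulses",
    columns=["dom_x", "dom_y"],
    sequential_index=0,  # first event in the dataset
)  # -> numpy.ndarray of shape (n_pulses, 2)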

      +
    @@ -678,7 +730,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.sqlite.html b/api/graphnet.data.dataset.sqlite.html index d147d8aaa..3d5ee0258 100644 --- a/api/graphnet.data.dataset.sqlite.html +++ b/api/graphnet.data.dataset.sqlite.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -406,6 +433,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -434,15 +468,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -454,14 +488,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -571,7 +605,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.dataset.sqlite.sqlite_dataset.html b/api/graphnet.data.dataset.sqlite.sqlite_dataset.html index c5c664424..c0ff5ffb0 100644 --- a/api/graphnet.data.dataset.sqlite.sqlite_dataset.html +++ b/api/graphnet.data.dataset.sqlite.sqlite_dataset.html @@ -123,10 +123,9 @@ - + - @@ -287,14 +286,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -432,6 +459,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -460,15 +494,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -480,14 +514,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -602,8 +636,8 @@ events ~ event_no % 5 > 0”).

  • graph_definition (GraphDefinition) – Method that defines the graph representation.

  • labels (Optional[Dict[str, Any]], default: None) – Dictionary of labels to be added to the dataset.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -620,10 +654,10 @@
    Parameters:
      -
    • table (str) –

    • -
    • columns (List[str] | str) –

    • -
    • sequential_index (int | None) –

    • -
    • selection (str | None) –

    • +
    • table (str)

    • +
    • columns (List[str] | str)

    • +
    • sequential_index (int | None)

    • +
    • selection (str | None)

    @@ -678,7 +712,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.pipeline.html b/api/graphnet.data.extractors.combine_extractors.html similarity index 70% rename from api/graphnet.data.pipeline.html rename to api/graphnet.data.extractors.combine_extractors.html index d7167dca9..02c7876a4 100644 --- a/api/graphnet.data.pipeline.html +++ b/api/graphnet.data.extractors.combine_extractors.html @@ -61,7 +61,7 @@ - pipeline — graphnet documentation + combine_extractors — graphnet documentation + + + + + + + + + + + + + + + + internal — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    + +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.extractors.internal.parquet_extractor.html b/api/graphnet.data.extractors.internal.parquet_extractor.html new file mode 100644 index 000000000..c2baa33b5 --- /dev/null +++ b/api/graphnet.data.extractors.internal.parquet_extractor.html @@ -0,0 +1,673 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + parquet_extractor — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    +
    + +
    +

    parquet_extractor

    +

    Parquet Extractor for conversion from internal parquet format.

    +
    +
    +class graphnet.data.extractors.internal.parquet_extractor.ParquetExtractor(extractor_name)[source]
    +

    Bases: Extractor

    +

    Class for extracting information from internal GraphNeT parquet files.

    +

Contains functionality required to extract data from internal parquet +files, i.e. files saved using the ParquetWriter. This allows for conversion +between internal data formats.

    +

    Construct ParquetExtractor.

    +
    +
    Parameters:
    +
      +
• extractor_name (str) – Name of the ParquetExtractor instance. Used to keep +track of the provenance of different data, and to name the tables to which +this data is saved.

    • +

    • +
    +
    +
    +


    +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.extractors.liquido.h5_extractor.html b/api/graphnet.data.extractors.liquido.h5_extractor.html new file mode 100644 index 000000000..e277cc317 --- /dev/null +++ b/api/graphnet.data.extractors.liquido.h5_extractor.html @@ -0,0 +1,713 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + h5_extractor — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    +
    + +
    +

    h5_extractor

    +

    H5 Extractor for LiquidO data files.

    +
    +
    +class graphnet.data.extractors.liquido.h5_extractor.H5Extractor(extractor_name, column_names)[source]
    +

    Bases: Extractor

    +

    Class for extracting information from LiquidO h5 files.

    +

    Construct H5Extractor.

    +
    +
    Parameters:
    +
      +
• extractor_name (str) – Name of the H5Extractor instance. Used to keep +track of the provenance of different data, and to name the tables to which +this data is saved.

    • +

    • +
• column_names (List[str]) – Name of the columns in extractor_name.

    • +
    +
    +
    +


    +
    +
    +
    +class graphnet.data.extractors.liquido.h5_extractor.H5HitExtractor[source]
    +

    Bases: H5Extractor

    +

    Extractor for HitData in LiquidO H5 files.

    +

    Extractor for HitData in LiquidO H5 files.

    +
    +
    +
    +
    +
    +class graphnet.data.extractors.liquido.h5_extractor.H5TruthExtractor[source]
    +

    Bases: H5Extractor

    +

    Extractor for TruthData in LiquidO H5 files.

    +

    Extractor for TruthData in LiquidO H5 files.
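A hedged end-to-end sketch pairing these extractors with the new LiquidO reader; the generic DataConverter wiring and the package-level import paths are assumptions based on the pre-configured converters in this deployment:

from graphnet.data import DataConverter
from graphnet.data.extractors.liquido import H5HitExtractor, H5TruthExtractor
from graphnet.data.readers import LiquidOReader
from graphnet.data.writers import ParquetWriter

converter = DataConverter(
    file_reader=LiquidOReader(),
    save_method=ParquetWriter(),
    extractors=[H5HitExtractor(), H5TruthExtractor()],
    outdir="./liquido_parquet",
)
converter("/path/to/h5_files")  # convert every h5 file found under this path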

    +
    +
    +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.extractors.liquido.html b/api/graphnet.data.extractors.liquido.html new file mode 100644 index 000000000..a2b3e15f9 --- /dev/null +++ b/api/graphnet.data.extractors.liquido.html @@ -0,0 +1,642 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + liquido — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    + +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.extractors.prometheus.html b/api/graphnet.data.extractors.prometheus.html new file mode 100644 index 000000000..8afc1a6f2 --- /dev/null +++ b/api/graphnet.data.extractors.prometheus.html @@ -0,0 +1,642 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + prometheus — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    + +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.extractors.prometheus.prometheus_extractor.html b/api/graphnet.data.extractors.prometheus.prometheus_extractor.html new file mode 100644 index 000000000..6597c78f4 --- /dev/null +++ b/api/graphnet.data.extractors.prometheus.prometheus_extractor.html @@ -0,0 +1,724 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + prometheus_extractor — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    + + +
    +
    + +
    +

    prometheus_extractor

    +

    Parquet Extractor for conversion of simulation files from PROMETHEUS.

    +
    +
    +class graphnet.data.extractors.prometheus.prometheus_extractor.PrometheusExtractor(extractor_name, columns)[source]
    +

    Bases: Extractor

    +

    Class for extracting information from PROMETHEUS parquet files.

    +

    Contains functionality required to extract data from PROMETHEUS parquet +files.

    +

    Construct PrometheusExtractor.

    +
    +
    Parameters:
    +
      +
• extractor_name (str) – Name of the PrometheusExtractor instance. Used to keep +track of the provenance of different data, and to name the tables to which +this data is saved.

    • +

    • +
• columns (List[str]) – List of column names to extract from the table.

    • +
    +
    +
    +


    +
    +
    +
    +class graphnet.data.extractors.prometheus.prometheus_extractor.PrometheusTruthExtractor(table_name)[source]
    +

    Bases: PrometheusExtractor

    +

    Class for extracting event level truth from Prometheus parquet files.

    +

This Extractor will extract the “initial_state” table, i.e. the neutrino truth.

    +

    Construct PrometheusTruthExtractor.

    +
    +
    Parameters:
    +

table_name (str, default: 'mc_truth') – Name of the table in the parquet files that contains +event-level truth. Defaults to “mc_truth”.

    +
    +
    +
    +
    +
    +class graphnet.data.extractors.prometheus.prometheus_extractor.PrometheusFeatureExtractor(table_name)[source]
    +

    Bases: PrometheusExtractor

    +

    Class for extracting pulses/photons from Prometheus parquet files.

    +

    Construct PrometheusFeatureExtractor.

    +
    +
    Parameters:
    +

table_name (str, default: 'photons') – Name of the table in the parquet files that contains the +photons/pulses. Defaults to “photons”.
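A hedged sketch converting Prometheus files to SQLite with these extractors; the DataConverter wiring and package-level imports are assumptions mirroring the other converters:

from graphnet.data import DataConverter
from graphnet.data.extractors.prometheus import (
    PrometheusFeatureExtractor,
    PrometheusTruthExtractor,
)
from graphnet.data.readers import PrometheusReader
from graphnet.data.writers import SQLiteWriter

converter = DataConverter(
    file_reader=PrometheusReader(),
    save_method=SQLiteWriter(),
    extractors=[PrometheusTruthExtractor(), PrometheusFeatureExtractor()],
    outdir="./prometheus_sqlite",
)
converter("/path/to/prometheus_files")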

    +
    +
    +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.data.html b/api/graphnet.data.html index ea5ca2d92..115adf908 100644 --- a/api/graphnet.data.html +++ b/api/graphnet.data.html @@ -123,10 +123,9 @@ - + - @@ -284,14 +283,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -376,6 +403,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -404,15 +438,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -424,14 +458,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -496,6 +530,10 @@
  • extractors
  • @@ -510,6 +548,9 @@
  • readers
  • sqlite
      @@ -539,6 +580,11 @@
    • TRUTH
  • +
  • curated_datamodule +
  • dataclasses
  • -
  • pipeline -
  • @@ -614,7 +656,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.parquet.deprecated_methods.html b/api/graphnet.data.parquet.deprecated_methods.html index 6d569ad59..d2404f15e 100644 --- a/api/graphnet.data.parquet.deprecated_methods.html +++ b/api/graphnet.data.parquet.deprecated_methods.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -399,6 +426,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -427,15 +461,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -447,14 +481,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -595,7 +629,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.parquet.html b/api/graphnet.data.parquet.html index 8f91610c1..b81779746 100644 --- a/api/graphnet.data.parquet.html +++ b/api/graphnet.data.parquet.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -384,6 +411,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -412,15 +446,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -432,14 +466,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -549,7 +583,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.pre_configured.dataconverters.html b/api/graphnet.data.pre_configured.dataconverters.html index 11a6c31f9..00db438c7 100644 --- a/api/graphnet.data.pre_configured.dataconverters.html +++ b/api/graphnet.data.pre_configured.dataconverters.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -350,6 +377,8 @@
  • I3ToParquetConverter
  • I3ToSQLiteConverter +
  • +
  • ParquetToSQLiteConverter
  • @@ -369,6 +398,13 @@ I3ToSQLiteConverter + +
  • + + + ParquetToSQLiteConverter + +
  • @@ -408,6 +444,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -436,15 +479,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -456,14 +499,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -509,6 +552,8 @@
  • I3ToParquetConverter
  • I3ToSQLiteConverter +
  • +
  • ParquetToSQLiteConverter
  • @@ -588,6 +633,29 @@ +
    +
    +class graphnet.data.pre_configured.dataconverters.ParquetToSQLiteConverter(extractors, outdir, index_column, num_workers)[source]
    +

    Bases: DataConverter

    +

    Preconfigured DataConverter for converting Parquet to SQLite files.

    +

    This class converts Parquet files written by ParquetWriter to SQLite.

    +

    Convert internal Parquet files to SQLite.

    +
    +
    Parameters:
    +
      +
    • extractors (List[ParquetExtractor]) – The `Extractor`(s) that will be applied to the input +files.

    • +
    • outdir (str) – The directory to save the files in.

    • +

    • +
    • index_column (str, default: 'event_no') – Name of the event id column added to the events. +Defaults to “event_no”.

    • +
    • num_workers (int, default: 1) – The number of CPUs used for parallel processing. +Defaults to 1 (no multiprocessing).
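A hedged conversion sketch (directory paths and the "truth" table name are placeholders; the call/merge pattern is assumed from the other DataConverters):

from graphnet.data.extractors.internal.parquet_extractor import ParquetExtractor
from graphnet.data.pre_configured.dataconverters import ParquetToSQLiteConverter

converter = ParquetToSQLiteConverter(
    extractors=[ParquetExtractor("truth")],
    outdir="./sqlite_out",
    num_workers=2,
)
converter("./parquet_in")  # convert all internal parquet files found here
converter.merge_files()    # merge per-file databases into a single one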

    • +
    +
    +
    +
    @@ -637,7 +705,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.pre_configured.html b/api/graphnet.data.pre_configured.html index af2a6c837..997b98695 100644 --- a/api/graphnet.data.pre_configured.html +++ b/api/graphnet.data.pre_configured.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -384,6 +411,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -412,15 +446,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -432,14 +466,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -497,6 +531,7 @@
  • dataconverters
  • @@ -550,7 +585,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.readers.graphnet_file_reader.html b/api/graphnet.data.readers.graphnet_file_reader.html index 3daaee6ca..4c22c8a31 100644 --- a/api/graphnet.data.readers.graphnet_file_reader.html +++ b/api/graphnet.data.readers.graphnet_file_reader.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -432,6 +459,27 @@ i3reader + +
  • + + + internal_parquet_reader + + +
  • +
  • + + + liquido_reader + + +
  • +
  • + + + prometheus_reader + +
  • @@ -462,6 +510,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -490,15 +545,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -510,14 +565,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -606,11 +661,11 @@
    Parameters:
      -
    • name (str | None) –

    • -
    • class_name (str | None) –

    • -
    • level (int) –

    • -
    • log_folder (str | None) –

    • -
    • kwargs (Any) –

    • +
    • name (str | None)

    • +
    • class_name (str | None)

    • +
    • level (int)

    • +
    • log_folder (str | None)

    • +
    • kwargs (Any)

    @@ -652,7 +707,7 @@

    Set `Extractor`(s) as member variable.

    Parameters:
    -

    extractors (Union[List[Extractor], List[I3Extractor]]) – A list of `Extractor`(s) to set as member variable.

    +

    extractors (Union[List[Extractor], List[I3Extractor], List[ParquetExtractor], List[H5Extractor], List[PrometheusExtractor]]) – A list of `Extractor`(s) to set as member variable.

    Return type:

    None

    @@ -722,7 +777,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.readers.html b/api/graphnet.data.readers.html index d0d0dc99b..d3866f275 100644 --- a/api/graphnet.data.readers.html +++ b/api/graphnet.data.readers.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -361,6 +388,27 @@ i3reader + +
  • + + + internal_parquet_reader + + +
  • +
  • + + + liquido_reader + + +
  • +
  • + + + prometheus_reader + +
  • @@ -391,6 +439,13 @@ constants + +
  • + + + curated_datamodule + +
  • @@ -419,15 +474,15 @@ datamodule +
  • +
  • - pipeline + datasets -
  • -
  • @@ -439,14 +494,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -509,6 +564,18 @@
  • I3Reader
  • +
  • internal_parquet_reader +
  • +
  • liquido_reader +
  • +
  • prometheus_reader +
  • @@ -560,7 +627,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.data.readers.i3reader.html b/api/graphnet.data.readers.i3reader.html index 4c3732eac..c90dac6fe 100644 --- a/api/graphnet.data.readers.i3reader.html +++ b/api/graphnet.data.readers.i3reader.html @@ -123,13 +123,12 @@ - + - - + @@ -286,14 +285,42 @@
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.pisa.plotting.html b/api/graphnet.data.readers.internal_parquet_reader.html
similarity index 54%
rename from api/graphnet.pisa.plotting.html
rename to api/graphnet.data.readers.internal_parquet_reader.html
index 8d9b9e9e8..4f155fada 100644
@@ page title @@
-plotting — graphnet documentation
+internal_parquet_reader — graphnet documentation
(page body replaced with the documentation for graphnet.data.readers.internal_parquet_reader)

diff --git a/api/graphnet.data.readers.liquido_reader.html b/api/graphnet.data.readers.liquido_reader.html
new file mode 100644
@@ page title @@
+liquido_reader — graphnet documentation
+liquido_reader
+
+Modules for reading data files from LiquidO.
+
+class graphnet.data.readers.liquido_reader.LiquidOReader(name, class_name, level, log_folder, **kwargs)[source]
+    Bases: GraphNeTFileReader
+
+    A class for reading h5 files from LiquidO.
+
+    Construct Logger.
+
+    Parameters:
+        name (str | None)
+        class_name (str | None)
+        level (int)
+        log_folder (str | None)
+        kwargs (Any)
+
+    find_files(path)[source]
+        Search folder(s) for h5 files.
+
+        Parameters:
+            path (Union[str, List[str]]) – directory to search for h5 files.
+        Return type:
+            List[str]
+        Returns:
+            List of h5 files in the folders.
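For orientation, a minimal usage sketch of the new reader; the data directory below is a hypothetical placeholder:

    from graphnet.data.readers.liquido_reader import LiquidOReader

    reader = LiquidOReader()  # the Logger arguments documented above are optional
    # Search a folder for LiquidO h5 files (path is illustrative only).
    files = reader.find_files("/data/liquido/run_001")
    print(f"Found {len(files)} h5 file(s)")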
\ No newline at end of file

diff --git a/api/graphnet.data.readers.prometheus_reader.html b/api/graphnet.data.readers.prometheus_reader.html
new file mode 100644
index 000000000..8f3f65f31
@@ page title @@
+prometheus_reader — graphnet documentation
+prometheus_reader
+
+Modules for reading data files from the Prometheus project.
+
+class graphnet.data.readers.prometheus_reader.PrometheusReader(name, class_name, level, log_folder, **kwargs)[source]
+    Bases: GraphNeTFileReader
+
+    A class for reading parquet files from Prometheus simulation.
+
+    Construct Logger.
+
+    Parameters:
+        name (str | None)
+        class_name (str | None)
+        level (int)
+        log_folder (str | None)
+        kwargs (Any)
+
+    find_files(path)[source]
+        Search folder(s) for parquet files.
+
+        Parameters:
+            path (Union[str, List[str]]) – directory to search for parquet files.
+        Return type:
+            List[str]
+        Returns:
+            List of parquet files in the folders.
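The same pattern applies to the Prometheus reader; again the path is a placeholder:

    from graphnet.data.readers.prometheus_reader import PrometheusReader

    reader = PrometheusReader()
    # Search a folder for Prometheus parquet files (path is illustrative only).
    files = reader.find_files("/data/prometheus/sim")
    print(f"Found {len(files)} parquet file(s)")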
\ No newline at end of file

diff --git a/api/graphnet.data.sqlite.deprecated_methods.html b/api/graphnet.data.sqlite.deprecated_methods.html
index 4f95c689d..804cd9851 100644
--- a/api/graphnet.data.sqlite.deprecated_methods.html
+++ b/api/graphnet.data.sqlite.deprecated_methods.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.sqlite.html b/api/graphnet.data.sqlite.html
index afd47386c..132f28ddd 100644
--- a/api/graphnet.data.sqlite.html
+++ b/api/graphnet.data.sqlite.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.utilities.random.html b/api/graphnet.data.utilities.random.html
index 6a8420331..9cfed03b3 100644
--- a/api/graphnet.data.utilities.random.html
+++ b/api/graphnet.data.utilities.random.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -544,7 +578,7 @@
 Parameters:
     files_list – List of I3 file paths.
     gcd_list (List[str]) – List of corresponding gcd file paths.
-    i3_list (List[str]) –
+    i3_list (List[str])

 Returns:
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.utilities.sqlite_utilities.html b/api/graphnet.data.utilities.sqlite_utilities.html
index 9e771ae9c..a2da298c7 100644
--- a/api/graphnet.data.utilities.sqlite_utilities.html
+++ b/api/graphnet.data.utilities.sqlite_utilities.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -631,7 +665,7 @@

 bool

 Parameters:
-    database_path (str) –
+    database_path (str)
@@ -686,8 +720,8 @@ Parameters:
@@ -737,9 +771,9 @@ Parameters:
@@ -780,12 +814,12 @@ Parameters:
(same trailing-dash cleanup in these parameter lists)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.utilities.string_selection_resolver.html b/api/graphnet.data.utilities.string_selection_resolver.html
index 6d1927ebf..7e1256f12 100644
--- a/api/graphnet.data.utilities.string_selection_resolver.html
+++ b/api/graphnet.data.utilities.string_selection_resolver.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -594,10 +628,10 @@
 Parameters:
-    dataset (Dataset) –
-    index_column (str) –
-    seed (int | None) –
-    use_cache (bool) –
+    dataset (Dataset)
+    index_column (str)
+    seed (int | None)
+    use_cache (bool)
@@ -614,7 +648,7 @@
 List[int]

 Parameters:
-    selection (str) –
+    selection (str)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.writers.graphnet_writer.html b/api/graphnet.data.writers.graphnet_writer.html
index 86cea8354..45d2c0945 100644
--- a/api/graphnet.data.writers.graphnet_writer.html
+++ b/api/graphnet.data.writers.graphnet_writer.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -580,11 +614,11 @@
 Parameters:
-    name (str | None) –
-    class_name (str | None) –
-    level (int) –
-    log_folder (str | None) –
-    kwargs (Any) –
+    name (str | None)
+    class_name (str | None)
+    level (int)
+    log_folder (str | None)
+    kwargs (Any)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.writers.html b/api/graphnet.data.writers.html
index afea72365..59d5ec37f 100644
--- a/api/graphnet.data.writers.html
+++ b/api/graphnet.data.writers.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.writers.parquet_writer.html b/api/graphnet.data.writers.parquet_writer.html
index 4c65807fe..2deaaf432 100644
--- a/api/graphnet.data.writers.parquet_writer.html
+++ b/api/graphnet.data.writers.parquet_writer.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -544,37 +578,42 @@

 DataConverter for the Parquet backend.

-class graphnet.data.writers.parquet_writer.ParquetWriter(name, class_name, level, log_folder, **kwargs)[source]
+class graphnet.data.writers.parquet_writer.ParquetWriter(truth_table, index_column)[source]

 Bases: GraphNeTWriter

 Class for writing interim data format to Parquet.

-Construct Logger.
+Construct ParquetWriter.

 Parameters:
-    name (str | None) –
-    class_name (str | None) –
-    level (int) –
-    log_folder (str | None) –
-    kwargs (Any) –
+    truth_table (str, default: 'truth') – Name of the table containing event-level truth data. Defaults to “truth”.
+    index_column (str, default: 'event_no') – The column used for indexation. Defaults to “event_no”.

-merge_files(files, output_dir)[source]
+merge_files(files, output_dir, events_per_batch, num_workers)[source]

-    Merge parquet files.
+    Convert files into shuffled batches.
+
+    Events will be shuffled, and the resulting batches will constitute random subsamples of the full dataset.

 Parameters:
-    files (List[str]) – input files for merging.
-    output_dir (str) – directory to store merged file(s) in.
+    files (List[str]) – Files converted to parquet. Note this argument is ignored by this method, as these files are automatically found using the output_dir.
+    output_dir (str) – The directory to store the batched data.
+    events_per_batch (int, default: 200000) – Number of events in each batch. Defaults to 200000.
+    num_workers (int, default: 1) – Number of workers to use for merging. Defaults to 1.

-Raises:
-    NotImplementedError
 Return type:
     None
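A sketch of the new ParquetWriter interface, assuming an already-converted output directory (the path and worker count are placeholders):

    from graphnet.data.writers.parquet_writer import ParquetWriter

    writer = ParquetWriter(truth_table="truth", index_column="event_no")
    # `files` is ignored by this method; batches are located via `output_dir`.
    writer.merge_files(
        files=[],
        output_dir="/data/converted",  # illustrative path
        events_per_batch=200_000,
        num_workers=4,
    )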

@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.data.writers.sqlite_writer.html b/api/graphnet.data.writers.sqlite_writer.html
index be3ac1b63..b9bff6f4f 100644
--- a/api/graphnet.data.writers.sqlite_writer.html
+++ b/api/graphnet.data.writers.sqlite_writer.html
@@ sidebar @@ (same navigation diff as the files above)
@@ -546,7 +580,7 @@ … a deep-learning friendly file format.

-class graphnet.data.writers.sqlite_writer.SQLiteWriter(merged_database_name, max_table_size)[source]
+class graphnet.data.writers.sqlite_writer.SQLiteWriter(merged_database_name, max_table_size, index_column)[source]

 Bases: GraphNeTWriter

 A method for saving GraphNeT’s interim dataformat to SQLite.

 Initialize SQLiteWriter.

 Parameters:
     … you have many events, as tables exceeding 400 million rows tend to be noticeably slower to query. Defaults to None (all events are put into a single database).
+    index_column (str, default: 'event_no') – Name of column that contains event id.

-merge_files(files, output_dir)[source]
+merge_files(files, output_dir, primary_key_rescue)[source]

 SQLite-specific method for merging output files/databases.

 Parameters:
     … you have many events, as tables exceeding 400 million rows tend to be noticeably slower to query. Defaults to None (all events are put into a single database).
+    primary_key_rescue (str, default: 'event_no') – The name of the column on which the primary key is constructed. This will only be used if it is not possible to infer the primary key name.

 Return type:
     None
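A sketch of merging per-worker databases with the new arguments; file names and paths are assumptions:

    from graphnet.data.writers.sqlite_writer import SQLiteWriter

    writer = SQLiteWriter(
        merged_database_name="merged.db",  # assumed output name
        max_table_size=None,               # keep everything in one database
        index_column="event_no",
    )
    writer.merge_files(
        files=["/tmp/db_1.db", "/tmp/db_2.db"],  # illustrative inputs
        output_dir="/data/merged",
    )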
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.datasets.html b/api/graphnet.datasets.html
new file mode 100644
index 000000000..2692462c3
@@ page title @@
+datasets — graphnet documentation
+(datasets package index page)
\ No newline at end of file

diff --git a/api/graphnet.datasets.prometheus_datasets.html b/api/graphnet.datasets.prometheus_datasets.html
new file mode 100644
index 000000000..9f28dc08a
@@ page title @@
+prometheus_datasets — graphnet documentation
+prometheus_datasets
+
+Public datasets from Prometheus Simulation.
+
+class graphnet.datasets.prometheus_datasets.PublicPrometheusDataset(graph_definition, download_dir, truth, features, backend, train_dataloader_kwargs, validation_dataloader_kwargs, test_dataloader_kwargs)[source]
+    Bases: ERDAHostedDataset
+
+    A generic class for public Prometheus Datasets hosted using ERDA.
+
+    Construct CuratedDataset.
+
+    Parameters:
+        graph_definition (GraphDefinition) – Method that defines the data representation.
+        download_dir (str) – Directory to download dataset to.
+        truth (Optional) – List of event-level truth to include. Will include all available information if not given.
+        features (Optional) – List of input features from pulsemap to use. If not given, all available features will be used.
+        backend (Optional) – Data backend to use. Either “parquet” or “sqlite”. Defaults to “parquet”.
+        train_dataloader_kwargs (Optional) – Arguments for the training DataLoader. Default None.
+        validation_dataloader_kwargs (Optional) – Arguments for the validation DataLoader. Default None.
+        test_dataloader_kwargs (Optional) – Arguments for the test DataLoader. Default None.
+
+class graphnet.datasets.prometheus_datasets.TRIDENTSmall(…)[source]
+    Bases: PublicPrometheusDataset
+
+    Public Dataset for Prometheus simulation of a TRIDENT geometry.
+    Contains ~1 million track events between 10 GeV and 10 TeV.
+    (Constructor signature and parameters identical to PublicPrometheusDataset.)
+
+class graphnet.datasets.prometheus_datasets.PONESmall(…)[source]
+    Bases: PublicPrometheusDataset
+
+    Public Dataset for Prometheus simulation of a P-ONE geometry.
+    Contains ~1 million track events between 10 GeV and 10 TeV.
+    (Constructor signature and parameters identical to PublicPrometheusDataset.)
+
+class graphnet.datasets.prometheus_datasets.BaikalGVDSmall(…)[source]
+    Bases: PublicPrometheusDataset
+
+    Public Dataset for Prometheus simulation of a Baikal-GVD geometry.
+    Contains ~1 million track events between 10 GeV and 10 TeV.
+    (Constructor signature and parameters identical to PublicPrometheusDataset.)
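A sketch of loading one of the curated datasets; the download directory and dataloader settings are assumptions:

    from graphnet.models.detector.prometheus import Prometheus
    from graphnet.models.graphs import KNNGraph
    from graphnet.datasets.prometheus_datasets import TRIDENTSmall

    dataset = TRIDENTSmall(
        graph_definition=KNNGraph(detector=Prometheus()),
        download_dir="./curated_data",  # assumed location
        backend="parquet",
        train_dataloader_kwargs={"batch_size": 16, "num_workers": 2},
    )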
\ No newline at end of file

diff --git a/api/graphnet.datasets.test_dataset.html b/api/graphnet.datasets.test_dataset.html
new file mode 100644
index 000000000..78409e055
@@ page title @@
+test_dataset — graphnet documentation
+test_dataset
+
+A CuratedDataset for unit tests.
+
+class graphnet.datasets.test_dataset.TestDataset(graph_definition, download_dir, truth, features, backend, train_dataloader_kwargs, validation_dataloader_kwargs, test_dataloader_kwargs)[source]
+    Bases: ERDAHostedDataset
+
+    A CuratedDataset class for unit tests of ERDAHosted Datasets.
+    This dataset should not be used outside the context of unit tests.
+
+    Construct CuratedDataset.
+    (Parameters identical to PublicPrometheusDataset above.)
\ No newline at end of file

diff --git a/api/graphnet.deployment.deployer.html b/api/graphnet.deployment.deployer.html
index f72da3725..94fe641f6 100644
--- a/api/graphnet.deployment.deployer.html
+++ b/api/graphnet.deployment.deployer.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.deployment_module.html b/api/graphnet.deployment.deployment_module.html
index a8241876c..72264e3cc 100644
--- a/api/graphnet.deployment.deployment_module.html
+++ b/api/graphnet.deployment.deployment_module.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.html b/api/graphnet.deployment.html
index 1bc16b07e..1abe19dfa 100644
--- a/api/graphnet.deployment.html
+++ b/api/graphnet.deployment.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.i3modules.html b/api/graphnet.deployment.i3modules.html
index 2efa006ef..d18b6168e 100644
--- a/api/graphnet.deployment.i3modules.html
+++ b/api/graphnet.deployment.i3modules.html
@@ sidebar @@ (same navigation diff as the files above)
 i3modules
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.icecube.cleaning_module.html b/api/graphnet.deployment.icecube.cleaning_module.html
index 47c3569b6..40f5acf49 100644
--- a/api/graphnet.deployment.icecube.cleaning_module.html
+++ b/api/graphnet.deployment.icecube.cleaning_module.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.icecube.html b/api/graphnet.deployment.icecube.html
index 9bc11ad9e..9e0686597 100644
--- a/api/graphnet.deployment.icecube.html
+++ b/api/graphnet.deployment.icecube.html
@@ sidebar @@ (same navigation diff as the files above)
 icecube
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.icecube.i3deployer.html b/api/graphnet.deployment.icecube.i3deployer.html
index ef9795a2c..26ab51030 100644
--- a/api/graphnet.deployment.icecube.i3deployer.html
+++ b/api/graphnet.deployment.icecube.i3deployer.html
@@ sidebar @@ (same navigation diff as the files above)
 i3deployer
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.deployment.icecube.inference_module.html b/api/graphnet.deployment.icecube.inference_module.html
index b9937cf35..4215022b9 100644
--- a/api/graphnet.deployment.icecube.inference_module.html
+++ b/api/graphnet.deployment.icecube.inference_module.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.exceptions.exceptions.html b/api/graphnet.exceptions.exceptions.html
new file mode 100644
index 000000000..79d8e18be
@@ page title @@
+exceptions — graphnet documentation
+exceptions
+
+Module containing GraphNeT-specific Exceptions.
+
+exception graphnet.exceptions.exceptions.ColumnMissingException[source]
+    Bases: Exception
+
+    Exception to indicate a missing column in a dataset.
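A short sketch of raising and catching the exception; `require_column` is a hypothetical helper:

    from graphnet.exceptions.exceptions import ColumnMissingException

    def require_column(columns, name):
        # Raise the GraphNeT-specific exception when a column is absent.
        if name not in columns:
            raise ColumnMissingException(f"Column '{name}' not found.")

    try:
        require_column(["event_no", "energy"], "zenith")
    except ColumnMissingException as err:
        print(err)  # -> Column 'zenith' not found.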
\ No newline at end of file

diff --git a/api/graphnet.pisa.html b/api/graphnet.exceptions.html
similarity index 83%
rename from api/graphnet.pisa.html
rename to api/graphnet.exceptions.html
index a411dfc87..812c1837d 100644
@@ page title @@
-pisa — graphnet documentation
+exceptions — graphnet documentation

diff --git a/api/graphnet.models.detector.liquido.html b/api/graphnet.models.detector.liquido.html
new file mode 100644
@@ page title @@
+liquido — graphnet documentation
+liquido
+
+LiquidO-specific Detector class(es).
+
+class graphnet.models.detector.liquido.LiquidO_v1(*args, **kwargs)[source]
+    Bases: Detector
+
+    Detector class for LiquidO prototype.
+
+    Construct Detector.
+
+    Parameters:
+        args (Any)
+        kwargs (Any)
+    Return type:
+        object
+
+    geometry_table_path = '/home/runner/.local/lib/python3.10/data/geometry_tables/liquid-o/liquido_v1.parquet'
+    xyz = ['sipm_x', 'sipm_y', 'sipm_z']
+    string_id_column = 'fiber_id'
+    sensor_id_column = 'sipm_id'
+
+    feature_map()[source]
+        Map standardization functions to each dimension.
+        Return type:
+            Dict[str, Callable]
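A minimal sketch of inspecting the new detector class:

    from graphnet.models.detector.liquido import LiquidO_v1

    detector = LiquidO_v1()
    print(detector.xyz)               # ['sipm_x', 'sipm_y', 'sipm_z']
    print(detector.sensor_id_column)  # 'sipm_id'
    # One standardization function per input dimension:
    print(sorted(detector.feature_map()))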
\ No newline at end of file

diff --git a/api/graphnet.models.detector.prometheus.html b/api/graphnet.models.detector.prometheus.html
index e912b56e5..ace5f174d 100644
--- a/api/graphnet.models.detector.prometheus.html
+++ b/api/graphnet.models.detector.prometheus.html
@@ sidebar @@ (same navigation diff as the files above; the detector listing gains liquido, and under models, transformer is added while pisa is removed)
+class graphnet.models.detector.prometheus.IceCube86Prometheus(*args, **kwargs)[source]
+    Bases: Detector
+
+    Detector class for Prometheus IceCube86.
+
+    Construct Detector.
+    Parameters: args (Any), kwargs (Any). Return type: object.
+
+    geometry_table_path = '/home/runner/.local/lib/python3.10/data/geometry_tables/prometheus/icecube86.parquet'
+    xyz = ['sensor_pos_x', 'sensor_pos_y', 'sensor_pos_z']
+    string_id_column = 'sensor_string_id'
+    sensor_id_column = 'sensor_id'
+
+    feature_map()[source]
+        Map standardization functions to each dimension.
+        Return type: Dict[str, Callable]
+
+class graphnet.models.detector.prometheus.IceCubeDeepCore8(*args, **kwargs)[source]
+    Bases: Detector
+
+    Detector class for Prometheus IceCubeDeepCore8.
+    geometry_table_path = '/home/runner/.local/lib/python3.10/data/geometry_tables/prometheus/icecube_deepcore.parquet'
+    (xyz, string_id_column, sensor_id_column, and feature_map() as in IceCube86Prometheus.)
+
+class graphnet.models.detector.prometheus.IceCubeGen2(*args, **kwargs)[source]
+    Bases: Detector
+
+    Detector class for Prometheus IceCubeGen2.
+    geometry_table_path = '/home/runner/.local/lib/python3.10/data/geometry_tables/prometheus/icecube_gen2.parquet'
+    (xyz, string_id_column, sensor_id_column, and feature_map() as in IceCube86Prometheus.)
+
+class graphnet.models.detector.prometheus.PONETriangle(*args, **kwargs)[source]
+    Bases: Detector
+
+    Detector class for Prometheus PONE Triangle.
+    geometry_table_path = '/home/runner/.local/lib/python3.10/data/geometry_tables/prometheus/pone_triangle.parquet'
+    (xyz, string_id_column, sensor_id_column, and feature_map() as in IceCube86Prometheus.)
 class graphnet.models.detector.prometheus.Prometheus(*args, **kwargs)[source]

-    Bases: ORCA150
-    Reference to ORCA150.
+    Bases: ORCA150SuperDense
+    Reference to ORCA150SuperDense.

     Construct Detector.

     Parameters:
-        args (Any) –
-        kwargs (Any) –
+        args (Any)
+        kwargs (Any)
     Return type:
         object
@@ feature_map / forward @@
 Tensor

 Parameters:
-    data (Data) –
+    data (Data)
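A minimal sketch of instantiating one of the new Prometheus detectors:

    from graphnet.models.detector.prometheus import IceCube86Prometheus

    detector = IceCube86Prometheus()
    print(detector.xyz)               # ['sensor_pos_x', 'sensor_pos_y', 'sensor_pos_z']
    print(detector.string_id_column)  # 'sensor_string_id'
    print(sorted(detector.feature_map()))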

@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.convnet.html b/api/graphnet.models.gnn.convnet.html
index 2ff5cc25d..dd7af7e29 100644
--- a/api/graphnet.models.gnn.convnet.html
+++ b/api/graphnet.models.gnn.convnet.html
@@ sidebar @@ (same navigation diff as the files above)
@@ ConvNet @@
 … output layer.
 nb_intermediate (int, default: 128) – Number of nodes in intermediate layer(s).
 dropout_ratio (float, default: 0.3) – Fraction of nodes to drop.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.dynedge.html b/api/graphnet.models.gnn.dynedge.html
index 2c625cd19..8bed660e2 100644
--- a/api/graphnet.models.gnn.dynedge.html
+++ b/api/graphnet.models.gnn.dynedge.html
@@ sidebar @@ (same navigation diff as the files above)
@@ DynEdge @@
 … after global pooling. The alternative is to add (distribute) them to the individual nodes before any convolutional operations.
-    activation_layer (Optional[Callable], default: None) – The activation function to use in the model.
+    activation_layer (Optional[str], default: None) – The activation function to use in the model.
     add_norm_layer (bool, default: False) – Whether to add a normalization layer after each linear layer.
     skip_readout (bool, default: False) – Whether to skip the readout layer(s). If True, the output of the last post-processing layer is returned directly.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.dynedge_jinst.html b/api/graphnet.models.gnn.dynedge_jinst.html
index e92659e50..4e1642a04 100644
--- a/api/graphnet.models.gnn.dynedge_jinst.html
+++ b/api/graphnet.models.gnn.dynedge_jinst.html
@@ sidebar @@ (same navigation diff as the files above)
@@ DynEdgeJINST @@
 nb_inputs (int) – Number of input features.
 nb_outputs – Number of output features.
 layer_size_scale (int, default: 4) – Integer that scales the size of hidden layers.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.dynedge_kaggle_tito.html b/api/graphnet.models.gnn.dynedge_kaggle_tito.html
index 93f3d6a20..891f743da 100644
--- a/api/graphnet.models.gnn.dynedge_kaggle_tito.html
+++ b/api/graphnet.models.gnn.dynedge_kaggle_tito.html
@@ sidebar @@ (same navigation diff as the files above)
@@ DynEdgeTITO @@
 n_head (int, default: 8) – The number of heads to use in the DynTrans layer.
 nb_neighbours (int, default: 8) – The number of neighbours to use in the DynTrans layer.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.gnn.html b/api/graphnet.models.gnn.gnn.html
index a7cb5d36a..d9e11f75c 100644
--- a/api/graphnet.models.gnn.gnn.html
+++ b/api/graphnet.models.gnn.gnn.html
@@ sidebar @@ (same navigation diff as the files above)
@@ GNN @@
 Parameters:
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.html b/api/graphnet.models.gnn.html
index d5dff7670..1e8916ed2 100644
--- a/api/graphnet.models.gnn.html
+++ b/api/graphnet.models.gnn.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.gnn.icemix.html b/api/graphnet.models.gnn.icemix.html
index 47931ca5a..3330d98f8 100644
--- a/api/graphnet.models.gnn.icemix.html
+++ b/api/graphnet.models.gnn.icemix.html
@@ sidebar @@ (same navigation diff as the files above)
@@ DeepIce @@
 Parameters:
     hidden_dim (int, default: 384) – The latent feature dimension.
-    seq_length (int, default: 128) – The base feature dimension.
+    mlp_ratio (int, default: 4) – MLP expansion ratio of FourierEncoder and Transformer.
+    seq_length (int, default: 192) – The base feature dimension.
     depth (int, default: 12) – The depth of the transformer.
     head_size (int, default: 32) – The size of the attention heads.
     depth_rel (int, default: 4) – The depth of the relative transformer.
     … provided, DynEdge will be initialized with the original Kaggle Competition settings. If include_dynedge is False, this argument has no impact.
+    n_features (int, default: 6) – The number of features in the input data.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ forward @@
 Tensor
 Parameters:
-    data (Data) –
+    data (Data)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.graphs.edges.edges.html b/api/graphnet.models.graphs.edges.edges.html
index 13f2d9245..d99618b17 100644
--- a/api/graphnet.models.graphs.edges.edges.html
+++ b/api/graphnet.models.graphs.edges.edges.html
@@ sidebar @@ (same navigation diff as the files above)
@@ EdgeDefinition @@
 Parameters:
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ KNNEdges @@
 Parameters:
     nb_nearest_neighbours (int) – number of neighbours.
     columns (List[int], default: [0, 1, 2]) – Node features to use for distance calculation. Defaults to [0, 1, 2].
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ RadialEdges @@
 Parameters:
     radius (float) – radius of sphere.
     columns (List[int], default: [0, 1, 2]) – columns of the node feature matrix used. Defaults to [0, 1, 2].
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ EuclideanEdges @@
 Parameters:
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.
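A minimal sketch of the k-nearest-neighbours edge definition documented above:

    from graphnet.models.graphs.edges import KNNEdges

    # Connect each node to its 8 nearest neighbours, using node-feature
    # columns [0, 1, 2] (typically x, y, z) for the distance calculation.
    edge_definition = KNNEdges(nb_nearest_neighbours=8, columns=[0, 1, 2])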
diff --git a/api/graphnet.models.graphs.edges.html b/api/graphnet.models.graphs.edges.html
index d6afe8a37..8906969b4 100644
--- a/api/graphnet.models.graphs.edges.html
+++ b/api/graphnet.models.graphs.edges.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.graphs.edges.minkowski.html b/api/graphnet.models.graphs.edges.minkowski.html
index b1401e5d6..78d5165b9 100644
--- a/api/graphnet.models.graphs.edges.minkowski.html
+++ b/api/graphnet.models.graphs.edges.minkowski.html
@@ sidebar @@ (same navigation diff as the files above)
@@ MinkowskiKNNEdges @@
 … nearest neighbours.
 space_coords (Optional[List[int]], default: None) – Coordinates of x, y, z.
 time_coord (Optional[int], default: 3) – Coordinate of time.
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.graphs.graph_definition.html b/api/graphnet.models.graphs.graph_definition.html
index 9842afef9..892a56090 100644
--- a/api/graphnet.models.graphs.graph_definition.html
+++ b/api/graphnet.models.graphs.graph_definition.html
@@ sidebar @@ (same navigation diff as the files above)
@@ GraphDefinition @@
 … sensor listed here will be removed from the graph. Defaults to None.

  • string_mask (Optional[List[int]], default: None) – A list of string id’s to be masked from the graph. Defaults to None.

  • sort_by (Optional[str], default: None) – Name of node feature to sort by. Defaults to None.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -664,7 +705,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.graphs.graphs.html b/api/graphnet.models.graphs.graphs.html index e904aef71..6f19f2668 100644 --- a/api/graphnet.models.graphs.graphs.html +++ b/api/graphnet.models.graphs.graphs.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
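A sketch composing a GraphDefinition from its parts; the detector and definitions are assumed choices:

    from graphnet.models.detector.prometheus import Prometheus
    from graphnet.models.graphs import GraphDefinition
    from graphnet.models.graphs.edges import KNNEdges
    from graphnet.models.graphs.nodes import NodesAsPulses

    graph_definition = GraphDefinition(
        detector=Prometheus(),
        node_definition=NodesAsPulses(),
        edge_definition=KNNEdges(nb_nearest_neighbours=8),
    )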
diff --git a/api/graphnet.models.graphs.graphs.html b/api/graphnet.models.graphs.graphs.html
index e904aef71..6f19f2668 100644
--- a/api/graphnet.models.graphs.graphs.html
+++ b/api/graphnet.models.graphs.graphs.html
@@ sidebar @@ (same navigation diff as the files above)
@@ KNNGraph @@
 … Defaults to None.
 Parameters:
     nb_nearest_neighbours (int, default: 8) – Number of edges for each node. Defaults to 8.
     columns (List[int], default: [0, 1, 2]) – Node feature columns used for distance calculation. Defaults to [0, 1, 2].
-    args (Any) –
-    kwargs (Any) –
+    args (Any)
+    kwargs (Any)
 Return type:
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.
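KNNGraph is the preconfigured shorthand for the composition above; a minimal sketch:

    from graphnet.models.detector.prometheus import Prometheus
    from graphnet.models.graphs import KNNGraph

    graph_definition = KNNGraph(
        detector=Prometheus(),
        nb_nearest_neighbours=8,  # default
        columns=[0, 1, 2],        # node-feature columns used for distances
    )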
diff --git a/api/graphnet.models.graphs.html b/api/graphnet.models.graphs.html
index 2507abc16..b5649038b 100644
--- a/api/graphnet.models.graphs.html
+++ b/api/graphnet.models.graphs.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.graphs.nodes.html b/api/graphnet.models.graphs.nodes.html
index b8b2b714e..bebf84303 100644
--- a/api/graphnet.models.graphs.nodes.html
+++ b/api/graphnet.models.graphs.nodes.html
@@ sidebar @@ (same navigation diff as the files above)
@@ footer @@
-Created using Sphinx 7.2.6.
+Created using Sphinx 7.3.7.

diff --git a/api/graphnet.models.graphs.nodes.nodes.html b/api/graphnet.models.graphs.nodes.nodes.html
index 42398f811..845670a3d 100644
--- a/api/graphnet.models.graphs.nodes.nodes.html
+++ b/api/graphnet.models.graphs.nodes.nodes.html
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -309,6 +336,13 @@ data + +
  • + + + datasets + +
  • @@ -316,6 +350,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -502,6 +543,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -539,13 +587,6 @@
  • - -
  • - - - pisa - -
  • @@ -629,8 +670,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -645,7 +686,7 @@
    Parameters:
    • x (tensor) – standardized node features with shape ´[num_pulses, d]´,

    • -
    • features. (where ´d´ is the number of node) –

    • +
• where ´d´ is the number of node features.

    • node_feature_names – list of names for each column in ´x´.

    @@ -685,7 +726,7 @@
    Parameters:
    • input_feature_names (List[str]) – List of column names of the input to the

    • -
    • definition. (node) –

    • +
• node definition.

    Return type:
    @@ -703,8 +744,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -729,8 +770,8 @@
  • percentiles (List[int]) – List of percentiles. E.g. [10, 50, 90].

  • add_counts (bool, default: True) – If True, number of duplicates is added to output array.

  • input_feature_names (Optional[List[str]], default: None) – (Optional) column names for input features.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -752,8 +793,8 @@
  • time_column (str, default: 'dom_time') – Name of time column.

  • charge_column (str, default: 'charge') – Name of charge column.

  • max_activations (Optional[int], default: None) – Maximum number of activations to include in the time series.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -774,12 +815,16 @@
    Parameters:
    • input_feature_names (Optional[List[str]], default: None) – Column names for input features. Minimum

    • -
    • names. (required features are z coordinate and hlc column) –

    • +
• required features are the z coordinate and hlc column names.

    • max_pulses (int, default: 768) – Maximum number of pulses to keep in the event.

    • z_name (str, default: 'dom_z') – Name of the z-coordinate column.

    • -
    • hlc_name (str, default: 'hlc') – Name of the Hard Local Coincidence Check column.

    • -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • hlc_name (Optional[str], default: 'hlc') – Name of the Hard Local Coincidence Check column.

    • +
• add_ice_properties (bool, default: True) – If True, scattering and absorption length of

    • +
• ice in IceCube are added to the feature set based on the z coordinate.

    • +
    • ice_args (Dict[str, Optional[float]], default: {'z_offset': None, 'z_scaling': None}) – Offset and scaling of the z coordinate in the Detector,

    • +
• to be able to make a similar conversion in the ice data.

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -836,7 +881,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.graphs.utils.html b/api/graphnet.models.graphs.utils.html index e80c9c7d5..4fc59ed86 100644 --- a/api/graphnet.models.graphs.utils.html +++ b/api/graphnet.models.graphs.utils.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
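To make the node-definition parameters above concrete, a hedged sketch constructing a percentile-cluster node definition; the class name PercentileClusters and the feature names are assumptions based on this listing:

    # Hypothetical sketch: summarize pulses via per-cluster percentiles.
    from graphnet.models.graphs.nodes import PercentileClusters

    node_definition = PercentileClusters(
        cluster_on=["dom_x", "dom_y", "dom_z"],  # assumed position columns
        percentiles=[10, 50, 90],                # percentiles used to summarize
        add_counts=True,                         # append the number of duplicates
    )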
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data +
  • +
  • + + + datasets + +
  • @@ -315,6 +349,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -456,6 +497,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -493,13 +541,6 @@
  • - -
  • - - - pisa - -
  • @@ -629,8 +670,8 @@
    Parameters:
      -
    • feature_names (List[str]) –

    • -
    • cluster_on (List[str]) –

    • +
    • feature_names (List[str])

    • +
    • cluster_on (List[str])

    @@ -661,7 +702,7 @@
  • cluster_indices (List[int]) – List of column indices on which the clusters are constructed.

  • percentiles (List[int]) – percentiles used to summarize x. E.g. [10,50,90].

  • -
  • add_counts (bool) –

  • +
  • add_counts (bool)

  • Return type:
    @@ -674,7 +715,7 @@
    -graphnet.models.graphs.utils.ice_transparency()[source]
    +graphnet.models.graphs.utils.ice_transparency(z_offset, z_scaling)[source]

    Return interpolation functions for optical properties of IceCube.

NOTE: The resulting interpolation functions assume that the @@ -682,8 +723,14 @@ Any deviation from this scaling method results in inaccurate results.

    -
    Returns:
    -

    +
    Parameters:
    +
      +
    • z_offset (Optional[float], default: None) – Offset to be added to the depth of the DOM.

    • +
    • z_scaling (Optional[float], default: None) – Scaling factor to be applied to the depth of the DOM.

    • +
    +
    +
    Returns:
    +

f_scattering: Function that takes a normalized depth and returns the

    corresponding normalized scattering length.

    f_absorption: Function that takes a normalized depth and returns the

    corresponding normalized absorption length.

    @@ -691,8 +738,8 @@

    -
    Return type:
    -

    f_scattering

    +
    Return type:
    +

    f_scattering

    @@ -745,7 +792,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.html b/api/graphnet.models.html index 15ae7feb7..fbf317188 100644 --- a/api/graphnet.models.html +++ b/api/graphnet.models.html @@ -123,14 +123,14 @@ - + + - - + @@ -284,14 +284,42 @@
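A short usage sketch for ice_transparency as documented above; the exact depth normalization expected by the returned interpolation functions is an assumption:

    # Hypothetical sketch: query IceCube optical-property interpolations.
    from graphnet.models.graphs.utils import ice_transparency

    f_scattering, f_absorption = ice_transparency(z_offset=None, z_scaling=None)
    depth = 0.0  # normalized DOM depth (normalization scheme assumed)
    print(f_scattering(depth), f_absorption(depth))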
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -306,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -313,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -369,6 +411,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -406,13 +455,6 @@
  • - -
  • - - - pisa - -
  • @@ -479,6 +521,7 @@
  • detector
  • @@ -510,6 +553,10 @@
  • task
  • +
  • transformer +
  • Submodules

    @@ -557,7 +604,7 @@
    @@ -630,7 +671,7 @@

    Model

    Parameters:
    -

    path (str) –

    +

    path (str)

    @@ -643,7 +684,7 @@

    None

    Parameters:
    -

    path (str) –

    +

    path (str)

    @@ -657,8 +698,8 @@
    Parameters:
    @@ -675,7 +716,7 @@
  • load_modules (Optional[List[str]], default: None) – List of modules used in the definition of the model which, as a consequence, need to be loaded into the global namespace. Defaults to loading torch.

  • -
  • source (ModelConfig | str) –

  • +
  • source (ModelConfig | str)

  • Raises:
    @@ -696,7 +737,7 @@

    None

    Parameters:
    -

    verbose_print (bool) –

    +

    verbose_print (bool)

    @@ -724,8 +765,8 @@
    Parameters:
    @@ -780,7 +821,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.rnn.html b/api/graphnet.models.rnn.html index 260861d9f..6fa9b2771 100644 --- a/api/graphnet.models.rnn.html +++ b/api/graphnet.models.rnn.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -314,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -377,6 +418,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -414,13 +462,6 @@
  • - -
  • - - - pisa - -
  • @@ -528,7 +569,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.rnn.node_rnn.html b/api/graphnet.models.rnn.node_rnn.html index 28807ca58..445c54760 100644 --- a/api/graphnet.models.rnn.node_rnn.html +++ b/api/graphnet.models.rnn.node_rnn.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -315,6 +349,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -412,6 +453,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -449,13 +497,6 @@
  • - -
  • - - - pisa - -
  • @@ -543,8 +584,8 @@
  • features_subset (Optional[List[int]], default: None) – The subset of latent features on each node that are used as metric dimensions when performing the k-nearest neighbours clustering. Defaults to [0,1,2,3]

  • dropout (float, default: 0.5) – Dropout fraction to use in the RNN. Defaults to 0.5.

  • embedding_dim (int, default: 0) – Embedding dimension of the RNN. Defaults to no embedding.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -573,7 +614,7 @@

    Tensor

    Parameters:
    -

    data (Data) –

    +

    data (Data)

    @@ -627,7 +668,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.standard_averaged_model.html b/api/graphnet.models.standard_averaged_model.html index 271f4d19e..e57f946c4 100644 --- a/api/graphnet.models.standard_averaged_model.html +++ b/api/graphnet.models.standard_averaged_model.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -314,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -361,6 +402,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -468,13 +516,6 @@
  • - -
  • - - - pisa - -
  • @@ -552,8 +593,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -570,8 +611,8 @@
    Parameters:
      -
    • train_batch (Data | List[Data]) –

    • -
    • batch_idx (int) –

    • +
    • train_batch (Data | List[Data])

    • +
    • batch_idx (int)

    @@ -586,8 +627,8 @@
    Parameters:
      -
    • val_batch (Data | List[Data]) –

    • -
    • batch_idx (int) –

    • +
    • val_batch (Data | List[Data])

    • +
    • batch_idx (int)

    @@ -602,10 +643,10 @@
    Parameters:
      -
    • epoch (int) –

    • -
    • batch_idx (int) –

    • -
    • optimizer (Type[Optimizer]) –

    • -
    • optimizer_closure (Callable[[], Any] | None) –

    • +
    • epoch (int)

    • +
    • batch_idx (int)

    • +
    • optimizer (Type[Optimizer])

    • +
    • optimizer_closure (Callable[[], Any] | None)

    @@ -620,8 +661,8 @@
    Parameters:
      -
    • path (str | Dict) –

    • -
    • kargs (Any | None) –

    • +
    • path (str | Dict)

    • +
    • kargs (Any | None)

    @@ -686,7 +727,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.standard_model.html b/api/graphnet.models.standard_model.html index b6edf21db..9353890c2 100644 --- a/api/graphnet.models.standard_model.html +++ b/api/graphnet.models.standard_model.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -314,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -361,6 +402,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -540,13 +588,6 @@
  • - -
  • - - - pisa - -
  • @@ -642,8 +683,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -660,18 +701,18 @@
    Parameters:
      -
    • train_dataloader (DataLoader) –

    • -
    • val_dataloader (DataLoader | None) –

    • -
    • max_epochs (int) –

    • -
    • early_stopping_patience (int) –

    • -
    • gpus (List[int] | int | None) –

    • -
    • callbacks (List[Callback] | None) –

    • -
    • ckpt_path (str | None) –

    • -
    • logger (Logger | None) –

    • -
    • log_every_n_steps (int) –

    • -
    • gradient_clip_val (float | None) –

    • -
    • distribution_strategy (str | None) –

    • -
    • trainer_kwargs (Any) –

    • +
    • train_dataloader (DataLoader)

    • +
    • val_dataloader (DataLoader | None)

    • +
    • max_epochs (int)

    • +
    • early_stopping_patience (int)

    • +
    • gpus (List[int] | int | None)

    • +
    • callbacks (List[Callback] | None)

    • +
    • ckpt_path (str | None)

    • +
    • logger (Logger | None)

    • +
    • log_every_n_steps (int)

    • +
    • gradient_clip_val (float | None)

    • +
    • distribution_strategy (str | None)

    • +
    • trainer_kwargs (Any)
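The parameters above map directly onto a typical training call. A hedged sketch, assuming model is a configured StandardModel and the dataloaders already exist:

    # Hypothetical sketch: train, then collect predictions as a DataFrame.
    model.fit(
        train_dataloader=train_dataloader,
        val_dataloader=val_dataloader,
        max_epochs=10,
        early_stopping_patience=5,
        gpus=[0],
    )
    results = model.predict_as_dataframe(
        dataloader=test_dataloader,
        additional_attributes=["event_no"],  # assumed index column
    )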

    @@ -705,7 +746,7 @@

    List[Union[Tensor, Data]]

    Parameters:
    -

    data (Data | List[Data]) –

    +

    data (Data | List[Data])

    @@ -721,8 +762,8 @@
    Parameters:
      -
    • batch (List[Data]) –

    • -
    • batch_idx (int) –

    • +
    • batch (List[Data])

    • +
    • batch_idx (int)

    @@ -737,8 +778,8 @@
    Parameters:
      -
    • train_batch (Data | List[Data]) –

    • -
    • batch_idx (int) –

    • +
    • train_batch (Data | List[Data])

    • +
    • batch_idx (int)

    @@ -753,8 +794,8 @@
    Parameters:
      -
    • val_batch (Data | List[Data]) –

    • -
    • batch_idx (int) –

    • +
    • val_batch (Data | List[Data])

    • +
    • batch_idx (int)

    @@ -769,9 +810,9 @@
    Parameters:
      -
    • preds (Tensor) –

    • -
    • data (List[Data]) –

    • -
    • verbose (bool) –

    • +
    • preds (Tensor)

    • +
    • data (List[Data])

    • +
    • verbose (bool)

    @@ -795,7 +836,7 @@

    Model

    Parameters:
    -

    mode (bool) –

    +

    mode (bool)

    @@ -809,9 +850,9 @@
    Parameters:
      -
    • dataloader (DataLoader) –

    • -
    • gpus (List[int] | int | None) –

    • -
    • distribution_strategy (str | None) –

    • +
    • dataloader (DataLoader)

    • +
    • gpus (List[int] | int | None)

    • +
    • distribution_strategy (str | None)

    @@ -828,11 +869,11 @@
    Parameters:
      -
    • dataloader (DataLoader) –

    • -
    • prediction_columns (List[str] | None) –

    • -
    • additional_attributes (List[str] | None) –

    • -
    • gpus (List[int] | int | None) –

    • -
    • distribution_strategy (str | None) –

    • +
    • dataloader (DataLoader)

    • +
    • prediction_columns (List[str] | None)

    • +
    • additional_attributes (List[str] | None)

    • +
    • gpus (List[int] | int | None)

    • +
    • distribution_strategy (str | None)

    @@ -887,7 +928,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.task.classification.html b/api/graphnet.models.task.classification.html index d37dabeb0..a9f149d0f 100644 --- a/api/graphnet.models.task.classification.html +++ b/api/graphnet.models.task.classification.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -315,6 +349,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -482,6 +523,13 @@
  • + +
  • + + + transformer + +
  • @@ -519,13 +567,6 @@
  • - -
  • - - - pisa - -
  • @@ -616,8 +657,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -637,8 +678,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -670,8 +711,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -740,7 +781,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.task.html b/api/graphnet.models.task.html index d6336a83f..458218d44 100644 --- a/api/graphnet.models.task.html +++ b/api/graphnet.models.task.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data +
  • +
  • + + + datasets + +
  • @@ -314,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -391,6 +432,13 @@
  • + +
  • + + + transformer + +
  • @@ -428,13 +476,6 @@
  • - -
  • - - - pisa - -
  • @@ -568,7 +609,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.task.reconstruction.html b/api/graphnet.models.task.reconstruction.html index 77ad75bcc..b335f1754 100644 --- a/api/graphnet.models.task.reconstruction.html +++ b/api/graphnet.models.task.reconstruction.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -315,6 +349,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -891,6 +932,13 @@
  • + +
  • + + + transformer + +
  • @@ -928,13 +976,6 @@
  • - -
  • - - - pisa - -
  • @@ -1131,8 +1172,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1164,8 +1205,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1197,8 +1238,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1230,8 +1271,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1263,8 +1304,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1296,8 +1337,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1329,8 +1370,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1362,8 +1403,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1395,8 +1436,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1428,8 +1469,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1461,8 +1502,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1494,8 +1535,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1528,8 +1569,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1598,7 +1639,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.task.task.html b/api/graphnet.models.task.task.html index b842f56bf..996dd3892 100644 --- a/api/graphnet.models.task.task.html +++ b/api/graphnet.models.task.task.html @@ -123,13 +123,12 @@ - + - - + @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -315,6 +349,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -596,6 +637,13 @@
  • + +
  • + + + transformer + +
  • @@ -633,13 +681,6 @@
  • - -
  • - - - pisa - -
  • @@ -785,8 +826,8 @@ is tested on the range [-1e6, 1e6].

  • loss_weight (Optional[str], default: None) – Name of the attribute in data containing per-event loss weights.

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -844,8 +885,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -863,8 +904,8 @@
    Parameters:
    @@ -885,7 +926,7 @@

    Union[Tensor, Data]

    Parameters:
    -

    x (Tensor | Data) –

    +

    x (Tensor | Data)

    @@ -904,8 +945,8 @@
  • hidden_size (int) – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -927,8 +968,8 @@
    Parameters:
    @@ -947,8 +988,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -984,8 +1025,8 @@
  • hidden_size – The number of columns in the output of the last latent layer of Model using this Task. Available through Model.nb_outputs

  • -
  • args (Any) –

  • -
  • kwargs (Any) –

  • +
  • args (Any)

  • +
  • kwargs (Any)

  • Return type:
    @@ -1012,8 +1053,8 @@
    Parameters:
      -
    • x (Tensor | Data) –

    • -
    • jacobian (Tensor | None) –

    • +
    • x (Tensor | Data)

    • +
    • jacobian (Tensor | None)

    @@ -1065,12 +1106,12 @@ - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.models.transformer.html b/api/graphnet.models.transformer.html new file mode 100644 index 000000000..e492e61d1 --- /dev/null +++ b/api/graphnet.models.transformer.html @@ -0,0 +1,583 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + transformer — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    +
    + +
    +

    transformer

    +

    Transformer-specific modules.

    +

    Submodules

    +
    + +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.models.transformer.iseecube.html b/api/graphnet.models.transformer.iseecube.html new file mode 100644 index 000000000..7b5e1e52b --- /dev/null +++ b/api/graphnet.models.transformer.iseecube.html @@ -0,0 +1,654 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + iseecube — graphnet documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Skip to content +
    + +
    + + +
    + + + + +
    +
    + +
    +
    +
    + +
    +
    +
    +
    +
    +
    + + +
    +
    +
    + +
    +
    + +
    +

    iseecube

    +

Implementation of the ISeeCube Transformer architecture used in:

    +

    https://github.com/ChenLi2049/ISeeCube/

    +
    +
    +class graphnet.models.transformer.iseecube.ISeeCube(*args, **kwargs)[source]
    +

    Bases: GNN

    +

    ISeeCube model.

    +

    Construct ISeeCube.

    +
    +
    Parameters:
    +
      +
    • hidden_dim (int, default: 384) – The latent feature dimension.

    • +
    • seq_length (int, default: 196) – The number of pulses in a neutrino event.

    • +
    • num_layers (int, default: 16) – The depth of the transformer.

    • +
    • num_heads (int, default: 12) – The number of the attention heads.

    • +
    • mlp_dim (int, default: 1536) – The mlp dimension of FourierEncoder and Transformer.

    • +
    • rel_pos_buckets (int, default: 32) – Relative position buckets for relative position +bias.

    • +
    • max_rel_pos (int, default: 256) – Maximum relative position for relative position bias.

    • +
    • num_register_tokens (int, default: 3) – The number of register tokens.

    • +
    • scaled_emb (bool, default: False) – Whether to scale the sinusoidal positional embeddings.

    • +
    • n_features (int, default: 6) – The number of features in the input data.

    • +
    • args (Any)

    • +
    • kwargs (Any)

    • +
    +
    +
    Return type:
    +

    object
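A minimal construction sketch using the defaults listed above; that every argument can be left at its default is an assumption:

    # Hypothetical sketch: instantiate ISeeCube with its documented defaults.
    from graphnet.models.transformer.iseecube import ISeeCube

    model = ISeeCube(
        hidden_dim=384,
        seq_length=196,
        num_layers=16,
        num_heads=12,
        mlp_dim=1536,
        num_register_tokens=3,
        n_features=6,
    )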

    +
    +
    +
    +
    +forward(data)[source]
    +

    Apply learnable forward pass.

    +
    +
    Return type:
    +

    Tensor

    +
    +
    Parameters:
    +

    data (Data)

    +
    +
    +
    +
    +
    + + +
    +
    +
    +
    +
    + + + + + \ No newline at end of file diff --git a/api/graphnet.models.utils.html b/api/graphnet.models.utils.html index 8f52d93c0..41f1210f7 100644 --- a/api/graphnet.models.utils.html +++ b/api/graphnet.models.utils.html @@ -123,13 +123,12 @@ - + - - + @@ -285,14 +284,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -314,6 +348,13 @@ deployment +
  • +
  • + + + exceptions + +
  • @@ -361,6 +402,13 @@ task +
  • +
  • + + + transformer + +
  • @@ -448,13 +496,6 @@
  • - -
  • - - - pisa - -
  • @@ -533,9 +574,9 @@
    Parameters:
      -
    • x (Tensor) –

    • -
    • edge_index (LongTensor) –

    • -
    • batch (Batch) –

    • +
    • x (Tensor)

    • +
    • edge_index (LongTensor)

    • +
    • batch (Batch)

    @@ -589,7 +630,7 @@
    Parameters:
    • x (Tensor) – array-like tensor with shape [n,d] where n is the total number

    • -
    • features. (of pulses in the batch and d is the number of node) –

    • +
• of pulses in the batch and d is the number of node features.

    • batch_idx (LongTensor) – a LongTensor identifying which row in x belongs to which training example. E.g. torch_geometric.data.Batch.batch.

    • @@ -638,12 +679,12 @@ - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.pisa.fitting.html b/api/graphnet.pisa.fitting.html deleted file mode 100644 index c40fe61bf..000000000 --- a/api/graphnet.pisa.fitting.html +++ /dev/null @@ -1,673 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - fitting — graphnet documentation - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Skip to content -
      - -
      - - -
      - - - - -
      -
      - -
      -
      -
      - -
      -
      -
      -
      -
      -
      - - -
      -
      -
      - -
      -
      - -
      -

      fitting

      -

      Functions and classes for fitting contours using PISA.

      -
      -
      -graphnet.pisa.fitting.config_updater(config_path, new_config_path, dummy_section)[source]
      -

      Update config files and saves them to file.

      -
      -
      Parameters:
      -
        -
      • config_path (str) – Path to original config file.

      • -
      • new_config_path (Optional[str], default: None) – Path to save updated config file.

      • -
      • dummy_section (str, default: 'temp') – Dummy section name to use for config files without -section headers.

      • -
      -
      -
      Yields:
      -

      ConfigUpdater instance for programatically updating config file.

      -
      -
      Return type:
      -

      ConfigUpdater

      -
      -
      -
      -
      -
      -class graphnet.pisa.fitting.WeightFitter(database_path, truth_table, index_column, statistical_fit)[source]
      -

      Bases: object

      -

      Class for fitting weights using PISA.

      -

      Construct WeightFitter.

      -
      -
      Parameters:
      -
        -
      • database_path (str) –

      • -
      • truth_table (str) –

      • -
      • index_column (str) –

      • -
      • statistical_fit (bool) –

      • -
      -
      -
      -
      -
      -fit_weights(config_outdir, weight_name, pisa_config_dict, add_to_database)[source]
      -

      Fit flux weights to each neutrino event in self._database_path.

      -

      If statistical_fit=True, only statistical effects are accounted for. -If True, certain systematic effects are included, but not -hypersurfaces.

      -
      -
      Parameters:
      -
        -
      • config_outdir (str) – The output directory in which to store the -configuration.

      • -
      • weight_name (str, default: '') – The name of the weight. If add_to_database=True, -this will be the name of the table.

      • -
      • pisa_config_dict (Optional[Dict], default: None) – The dictionary of PISA configurations. Can be -used to change assumptions regarding the fit.

      • -
      • add_to_database (bool, default: False) – If True, a table will be added to the database -called weight_name with two columns: -[index_column, weight_name]

      • -
      -
      -
      Return type:
      -

      DataFrame

      -
      -
      Returns:
      -

      A dataframe with columns [index_column, weight_name].

      -
      -
      -
      -
      -
      -
      -class graphnet.pisa.fitting.ContourFitter(outdir, pipeline_path, post_fix, model_name, include_retro, statistical_fit)[source]
      -

      Bases: object

      -

      Class for fitting contours using PISA.

      -

      Construct ContourFitter.

      -
      -
      Parameters:
      -
        -
      • outdir (str) –

      • -
      • pipeline_path (str) –

      • -
      • post_fix (str) –

      • -
      • model_name (str) –

      • -
      • include_retro (bool) –

      • -
      • statistical_fit (bool) –

      • -
      -
      -
      -
      -
      -fit_1d_contour(run_name, config_dict, grid_size, n_workers, theta23_minmax=(36.0, 54.0), dm31_minmax=(2.3, 2.7))[source]
      -

      Fit 1D contours.

      -
      -
      Return type:
      -

      None

      -
      -
      Parameters:
      -
        -
      • run_name (str) –

      • -
      • config_dict (Dict) –

      • -
      • grid_size (int) –

      • -
      • n_workers (int) –

      • -
      • theta23_minmax (Tuple[float, float]) –

      • -
      • dm31_minmax (Tuple[float, float]) –

      • -
      -
      -
      -
      -
      -
      -fit_2d_contour(run_name, config_dict, grid_size, n_workers, theta23_minmax=(36.0, 54.0), dm31_minmax=(2.3, 2.7))[source]
      -

      Fit 2D contours.

      -
      -
      Return type:
      -

      None

      -
      -
      Parameters:
      -
        -
      • run_name (str) –

      • -
      • config_dict (Dict) –

      • -
      • grid_size (int) –

      • -
      • n_workers (int) –

      • -
      • theta23_minmax (Tuple[float, float]) –

      • -
      • dm31_minmax (Tuple[float, float]) –

      • -
      -
      -
      -
      -
      -
      - - -
      -
      -
      -
      -
      - - - - - \ No newline at end of file diff --git a/api/graphnet.training.callbacks.html b/api/graphnet.training.callbacks.html index 30f76fb50..0d5fbee57 100644 --- a/api/graphnet.training.callbacks.html +++ b/api/graphnet.training.callbacks.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
    • - Install + Installation
    • - Contribute + Models In GraphNeT + + +
    • +
    • + + + Datasets In GraphNeT + + +
    • +
    • + + + Data Conversion in GraphNeT + + +
    • +
    • + + + Integrating New Experiments into GraphNeT + + +
    • +
    • + + + Contributing To GraphNeT
    • @@ -307,6 +334,13 @@ data + +
    • + + + datasets + +
    • @@ -318,14 +352,14 @@
    • - models + exceptions
    • - pisa + models
    • @@ -652,8 +686,8 @@
      Parameters:
        -
      • refresh_rate (int) –

      • -
      • process_position (int) –

      • +
      • refresh_rate (int)

      • +
      • process_position (int)

      @@ -707,8 +741,8 @@
    Parameters:
      -
    • trainer (Trainer) –

    • -
    • model (LightningModule) –

    • +
    • trainer (Trainer)

    • +
    • model (LightningModule)

    @@ -726,8 +760,8 @@
    Parameters:
      -
    • trainer (Trainer) –

    • -
    • model (LightningModule) –

    • +
    • trainer (Trainer)

    • +
    • model (LightningModule)

    @@ -743,8 +777,8 @@
    Parameters:
      -
    • trainer (Trainer) –

    • -
    • model (LightningModule) –

    • +
    • trainer (Trainer)

    • +
    • model (LightningModule)

    @@ -883,7 +917,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.training.html b/api/graphnet.training.html index 43a1760a0..caa444970 100644 --- a/api/graphnet.training.html +++ b/api/graphnet.training.html @@ -123,14 +123,13 @@ - + - - + @@ -284,14 +283,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -306,6 +333,13 @@ data + +
  • + + + datasets + +
  • @@ -317,14 +351,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -433,6 +467,7 @@
  • labels
  • loss_functions
  • @@ -388,6 +424,13 @@ Direction + +
  • + + + Track + +
  • @@ -453,6 +496,8 @@
  • Direction +
  • +
  • Track
  • @@ -507,6 +552,28 @@ +
    +
    +class graphnet.training.labels.Track(key, pid_key, interaction_key)[source]
    +

    Bases: Label

    +

    Class for producing NuMuCC label.

    +

    Label is set to 1 if the event is a NuMu CC event, else 0.

    +

    Construct Track label.

    +
    +
    Parameters:
    +
      +
    • key (str, default: 'track') – The name of the field in Data where the label will be +stored. That is, graph[key] = label.

    • +
• pid_key (str, default: 'pid') – The name of the pre-existing key in graph that will +be used to access the pdg encoding, used when calculating +the label.

    • +
• interaction_key (str, default: 'interaction_type') – The name of the pre-existing key in graph that +will be used to access the interaction type (1 denoting CC), +used when calculating the label.

    • +
    +
    +
    +
    @@ -556,7 +623,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.training.loss_functions.html b/api/graphnet.training.loss_functions.html index bcf94e1bb..f348c299e 100644 --- a/api/graphnet.training.loss_functions.html +++ b/api/graphnet.training.loss_functions.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
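A sketch of using the Track label documented above; attaching it via Dataset.add_label is an assumption about the surrounding API:

    # Hypothetical sketch: request a per-event NuMu-CC 'track' label.
    from graphnet.training.labels import Track

    track_label = Track(
        key="track",
        pid_key="pid",
        interaction_key="interaction_type",
    )
    # e.g. dataset.add_label(track_label); graph["track"] is then 1 for NuMu CC, else 0.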
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -318,14 +352,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -641,8 +675,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -660,7 +694,7 @@
  • target (Tensor) – Tensor containing targets. Shape [N,T]

  • return_elements (bool, default: False) – Whether elementwise loss terms should be returned. The alternative is to return the averaged loss across examples.

  • -
  • weights (Tensor | None) –

  • +
  • weights (Tensor | None)

  • Return type:
    @@ -682,8 +716,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -700,8 +734,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -719,8 +753,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -740,8 +774,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -760,8 +794,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -806,9 +840,9 @@
    Parameters:
      -
    • ctx (Any) –

    • -
    • m (int) –

    • -
    • kappa (Tensor) –

    • +
    • ctx (Any)

    • +
    • m (int)

    • +
    • kappa (Tensor)

    @@ -823,8 +857,8 @@
    Parameters:
      -
    • ctx (Any) –

    • -
    • grad_output (Tensor) –

    • +
    • ctx (Any)

    • +
    • grad_output (Tensor)

    @@ -841,8 +875,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -859,8 +893,8 @@
    Parameters:
      -
    • m (int) –

    • -
    • kappa (Tensor) –

    • +
    • m (int)

    • +
    • kappa (Tensor)

    @@ -876,8 +910,8 @@
    Parameters:
      -
    • m (int) –

    • -
    • kappa (Tensor) –

    • +
    • m (int)

    • +
    • kappa (Tensor)

    @@ -896,9 +930,9 @@
    Parameters:
      -
    • m (int) –

    • -
    • kappa (Tensor) –

    • -
    • kappa_switch (float) –

    • +
    • m (int)

    • +
    • kappa (Tensor)

    • +
    • kappa_switch (float)

    @@ -913,8 +947,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -931,8 +965,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -949,8 +983,8 @@
    Parameters:
      -
    • args (Any) –

    • -
    • kwargs (Any) –

    • +
    • args (Any)

    • +
    • kwargs (Any)

    Return type:
    @@ -1007,7 +1041,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.training.utils.html b/api/graphnet.training.utils.html index e974931d6..616e7ff8c 100644 --- a/api/graphnet.training.utils.html +++ b/api/graphnet.training.utils.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
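To illustrate the forward signature shared by the loss functions above (prediction, target, optional return_elements), a hedged sketch; using LogCoshLoss as the concrete class is an assumption:

    # Hypothetical sketch: averaged vs. element-wise loss terms.
    import torch
    from graphnet.training.loss_functions import LogCoshLoss

    loss_fn = LogCoshLoss()
    prediction = torch.randn(8, 1)
    target = torch.randn(8, 1)
    mean_loss = loss_fn(prediction, target)                        # averaged loss
    per_event = loss_fn(prediction, target, return_elements=True)  # one term per example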
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -318,14 +352,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -518,7 +552,7 @@

    Batch

    Parameters:
    -

    graphs (List[Data]) –

    +

    graphs (List[Data])

    @@ -533,7 +567,7 @@ last elements, which will always be 0 and 1 respectively.

    Parameters:
    -

    batch_splits (List[float]) –

    +

    batch_splits (List[float])

    @@ -547,24 +581,24 @@
    Parameters:
      -
    • db (str) –

    • -
    • pulsemaps (str | List[str]) –

    • -
    • graph_definition (GraphDefinition) –

    • -
    • features (List[str]) –

    • -
    • truth (List[str]) –

    • -
    • batch_size (int) –

    • -
    • shuffle (bool) –

    • -
    • selection (List[int] | None) –

    • -
    • num_workers (int) –

    • -
    • persistent_workers (bool) –

    • -
    • node_truth (List[str] | None) –

    • -
    • truth_table (str) –

    • -
    • node_truth_table (str | None) –

    • -
    • string_selection (List[int] | None) –

    • -
    • loss_weight_table (str | None) –

    • -
    • loss_weight_column (str | None) –

    • -
    • index_column (str) –

    • -
    • labels (Dict[str, Callable] | None) –

    • +
    • db (str)

    • +
    • pulsemaps (str | List[str])

    • +
    • graph_definition (GraphDefinition)

    • +
    • features (List[str])

    • +
    • truth (List[str])

    • +
    • batch_size (int)

    • +
    • shuffle (bool)

    • +
    • selection (List[int] | None)

    • +
    • num_workers (int)

    • +
    • persistent_workers (bool)

    • +
    • node_truth (List[str] | None)

    • +
    • truth_table (str)

    • +
    • node_truth_table (str | None)

    • +
    • string_selection (List[int] | None)

    • +
    • loss_weight_table (str | None)

    • +
    • loss_weight_column (str | None)

    • +
    • index_column (str)

    • +
    • labels (Dict[str, Callable] | None)

    @@ -579,26 +613,26 @@
    Parameters:
      -
    • db (str) –

    • -
    • graph_definition (GraphDefinition) –

    • -
    • selection (List[int] | None) –

    • -
    • pulsemaps (str | List[str]) –

    • -
    • features (List[str]) –

    • -
    • truth (List[str]) –

    • -
    • batch_size (int) –

    • -
    • database_indices (List[int] | None) –

    • -
    • seed (int) –

    • -
    • test_size (float) –

    • -
    • num_workers (int) –

    • -
    • persistent_workers (bool) –

    • -
    • node_truth (str | None) –

    • -
    • truth_table (str) –

    • -
    • node_truth_table (str | None) –

    • -
    • string_selection (List[int] | None) –

    • -
    • loss_weight_column (str | None) –

    • -
    • loss_weight_table (str | None) –

    • -
    • index_column (str) –

    • -
    • labels (Dict[str, Callable] | None) –

    • +
    • db (str)

    • +
    • graph_definition (GraphDefinition)

    • +
    • selection (List[int] | None)

    • +
    • pulsemaps (str | List[str])

    • +
    • features (List[str])

    • +
    • truth (List[str])

    • +
    • batch_size (int)

    • +
    • database_indices (List[int] | None)

    • +
    • seed (int)

    • +
    • test_size (float)

    • +
    • num_workers (int)

    • +
    • persistent_workers (bool)

    • +
    • node_truth (str | None)

    • +
    • truth_table (str)

    • +
    • node_truth_table (str | None)

    • +
    • string_selection (List[int] | None)

    • +
    • loss_weight_column (str | None)

    • +
    • loss_weight_table (str | None)

    • +
    • index_column (str)

    • +
    • labels (Dict[str, Callable] | None)

    @@ -613,12 +647,12 @@
    Parameters:
    @@ -633,11 +667,11 @@
    Parameters:
    @@ -707,7 +741,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.training.weight_fitting.html b/api/graphnet.training.weight_fitting.html index 703da8cd7..f3b969fc9 100644 --- a/api/graphnet.training.weight_fitting.html +++ b/api/graphnet.training.weight_fitting.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
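A hedged sketch of the dataloader helpers documented above; the database path, pulsemap, and feature names are placeholders, and the (train, validation) return order is an assumption:

    # Hypothetical sketch: build training/validation dataloaders from SQLite.
    from graphnet.training.utils import make_train_validation_dataloader

    train_dl, val_dl = make_train_validation_dataloader(
        db="/path/to/events.db",            # placeholder database path
        graph_definition=graph_definition,  # e.g. a KNNGraph instance
        selection=None,                     # use all events
        pulsemaps="pulsemap",               # placeholder pulsemap name
        features=["dom_x", "dom_y", "dom_z", "dom_time"],
        truth=["energy", "zenith"],
        batch_size=128,
        num_workers=4,
    )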
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -318,14 +352,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -490,9 +524,9 @@
    Parameters:
      -
    • database_path (str) –

    • -
    • truth_table (str) –

    • -
    • index_column (str) –

    • +
    • database_path (str)

    • +
    • truth_table (str)

    • +
    • index_column (str)

    @@ -538,9 +572,9 @@
    Parameters:
      -
    • database_path (str) –

    • -
    • truth_table (str) –

    • -
    • index_column (str) –

    • +
    • database_path (str)

    • +
    • truth_table (str)

    • +
    • index_column (str)

    @@ -556,9 +590,9 @@
    Parameters:
      -
    • database_path (str) –

    • -
    • truth_table (str) –

    • -
    • index_column (str) –

    • +
    • database_path (str)

    • +
    • truth_table (str)

    • +
    • index_column (str)

    @@ -612,7 +646,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.utilities.argparse.html b/api/graphnet.utilities.argparse.html index 306ca3b0d..57526f2b8 100644 --- a/api/graphnet.utilities.argparse.html +++ b/api/graphnet.utilities.argparse.html @@ -123,10 +123,9 @@ - + - @@ -285,14 +284,42 @@
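A usage sketch for the weight fitters documented above; the concrete class name Uniform and the fit arguments are assumptions based on graphnet.training.weight_fitting:

    # Hypothetical sketch: fit per-event weights that flatten the zenith spectrum.
    import numpy as np
    from graphnet.training.weight_fitting import Uniform  # assumed subclass

    fitter = Uniform(
        database_path="/path/to/events.db",  # placeholder
        truth_table="truth",
        index_column="event_no",
    )
    weights = fitter.fit(bins=np.arange(0, np.pi, 0.1), variable="zenith")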
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -307,6 +334,13 @@ data + +
  • + + + datasets + +
  • @@ -318,14 +352,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -534,7 +568,7 @@

    Construct Options.

    Parameters:
    -

    options (str | Tuple[str, Any]) –

    +

    options (str | Tuple[str, Any])

    @@ -546,7 +580,7 @@

    bool

    Parameters:
    -

    option (str) –

    +

    option (str)

    @@ -559,7 +593,7 @@

    Optional[Any]

    Parameters:
    -

    option (str) –

    +

    option (str)

    @@ -573,8 +607,8 @@
    Parameters:
      -
    • usage (str | None) –

    • -
    • description (str | None) –

    • +
    • usage (str | None)

    • +
    • description (str | None)

    @@ -592,7 +626,7 @@

    ArgumentParser

    Parameters:
    -

    args (str | Tuple[str, Any]) –

    +

    args (str | Tuple[str, Any])

    @@ -646,7 +680,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.utilities.config.base_config.html b/api/graphnet.utilities.config.base_config.html index a43d9ac48..4212984a0 100644 --- a/api/graphnet.utilities.config.base_config.html +++ b/api/graphnet.utilities.config.base_config.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
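A short sketch of the argparse helpers documented above; the with_standard_arguments method name is an assumption based on this page's listing:

    # Hypothetical sketch: standard CLI options via graphnet's ArgumentParser.
    from graphnet.utilities.argparse import ArgumentParser

    parser = ArgumentParser(description="Train a model")
    parser.with_standard_arguments("gpus", ("max-epochs", 10))  # name assumed
    args = parser.parse_args()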
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -319,14 +353,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -607,7 +641,7 @@

    BaseConfig

    Parameters:
    -

    path (str) –

    +

    path (str)

    @@ -620,7 +654,7 @@

    Optional[str]

    Parameters:
    -

    path (str | None) –

    +

    path (str | None)

    @@ -663,9 +697,9 @@
    Parameters:
    @@ -719,7 +753,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.utilities.config.configurable.html b/api/graphnet.utilities.config.configurable.html index c8d3bec55..1b48bdbd5 100644 --- a/api/graphnet.utilities.config.configurable.html +++ b/api/graphnet.utilities.config.configurable.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -319,14 +353,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -565,7 +599,7 @@

    None

    Parameters:
    -

    path (str) –

    +

    path (str)

    @@ -578,7 +612,7 @@

    Any

    Parameters:
    -

    source (BaseConfig | str) –

    +

    source (BaseConfig | str)

    @@ -632,7 +666,7 @@ Created using - Sphinx 7.2.6. + Sphinx 7.3.7. and Material for Sphinx diff --git a/api/graphnet.utilities.config.dataset_config.html b/api/graphnet.utilities.config.dataset_config.html index d7f81f110..f643e6c4d 100644 --- a/api/graphnet.utilities.config.dataset_config.html +++ b/api/graphnet.utilities.config.dataset_config.html @@ -123,10 +123,9 @@ - + - @@ -286,14 +285,42 @@
  • - Install + Installation
  • - Contribute + Models In GraphNeT + + +
  • +
  • + + + Datasets In GraphNeT + + +
  • +
  • + + + Data Conversion in GraphNeT + + +
  • +
  • + + + Integrating New Experiments into GraphNeT + + +
  • +
  • + + + Contributing To GraphNeT
  • @@ -308,6 +335,13 @@ data + +
  • + + + datasets + +
  • @@ -319,14 +353,14 @@
  • - models + exceptions
  • - pisa + models
  • @@ -843,22 +877,22 @@
    Parameters:
      -
    • path (str | List[str]) –

    • -
    • pulsemaps (str | List[str]) –

    • -
    • features (List[str]) –

    • -
    • truth (List[str]) –

    • -
    • node_truth (List[str] | None) –

    • -
    • index_column (str) –

    • -
    • truth_table (str) –

    • -
    • node_truth_table (str | None) –

    • -
    • string_selection (List[int] | None) –

    • -
    • selection (str | List[str] | List[int | List[int]] | Dict[str, str | List[str]] | None) –

    • -
    • loss_weight_table (str | None) –

    • -
    • loss_weight_column (str | None) –

    • -
    • loss_weight_default_value (float | None) –

    • -
    • seed (int | None) –

    • -
    • graph_definition (Any) –

    • -
    • labels (Dict[str, Any] | None) –

    • +
    • path (str | List[str])

    • +
    • pulsemaps (str | List[str])

    • +
    • features (List[str])

    • +
    • truth (List[str])

    • +
    • node_truth (List[str] | None)

    • +
    • index_column (str)

    • +
    • truth_table (str)

    • +
    • node_truth_table (str | None)

    • +
    • string_selection (List[int] | None)

    • +
    • selection (str | List[str] | List[int | List[int]] | Dict[str, str | List[str]] | None)

    • +
    • loss_weight_table (str | None)

    • +
    • loss_weight_column (str | None)

    • +
    • loss_weight_default_value (float | None)

    • +
    • seed (int | None)

    • +
    • graph_definition (Any)

    • +
    • labels (Dict[str, Any] | None)

    @@ -951,7 +985,7 @@
    -model_fields: ClassVar[dict[str, FieldInfo]] = {'features': FieldInfo(annotation=List[str], required=True), 'graph_definition': FieldInfo(annotation=Any, required=False), 'index_column': FieldInfo(annotation=str, required=False, default='event_no'), 'labels': FieldInfo(annotation=Union[Dict[str, Any], NoneType], required=False), 'loss_weight_column': FieldInfo(annotation=Union[str, NoneType], required=False), 'loss_weight_default_value': FieldInfo(annotation=Union[float, NoneType], required=False), 'loss_weight_table': FieldInfo(annotation=Union[str, NoneType], required=False), 'node_truth': FieldInfo(annotation=Union[List[str], NoneType], required=False), 'node_truth_table': FieldInfo(annotation=Union[str, NoneType], required=False), 'path': FieldInfo(annotation=Union[str, List[str]], required=True), 'pulsemaps': FieldInfo(annotation=Union[str, List[str]], required=True), 'seed': FieldInfo(annotation=Union[int, NoneType], required=False), 'selection': FieldInfo(annotation=Union[str, List[str], List[Union[int, List[int]]], Dict[str, Union[str, List[str]]], NoneType], required=False), 'string_selection': FieldInfo(annotation=Union[List[int], NoneType], required=False), 'truth': FieldInfo(annotation=List[str], required=True), 'truth_table': FieldInfo(annotation=str, required=False, default='truth')}
    +model_fields: ClassVar[dict[str, FieldInfo]] = {'features': FieldInfo(annotation=List[str], required=True), 'graph_definition': FieldInfo(annotation=Any, required=False, default=None), 'index_column': FieldInfo(annotation=str, required=False, default='event_no'), 'labels': FieldInfo(annotation=Union[Dict[str, Any], NoneType], required=False, default=None), 'loss_weight_column': FieldInfo(annotation=Union[str, NoneType], required=False, default=None), 'loss_weight_default_value': FieldInfo(annotation=Union[float, NoneType], required=False, default=None), 'loss_weight_table': FieldInfo(annotation=Union[str, NoneType], required=False, default=None), 'node_truth': FieldInfo(annotation=Union[List[str], NoneType], required=False, default=None), 'node_truth_table': FieldInfo(annotation=Union[str, NoneType], required=False, default=None), 'path': FieldInfo(annotation=Union[str, List[str]], required=True), 'pulsemaps': FieldInfo(annotation=Union[str, List[str]], required=True), 'seed': FieldInfo(annotation=Union[int, NoneType], required=False, default=None), 'selection': FieldInfo(annotation=Union[str, List[str], List[Union[int, List[int]]], Dict[str, Union[str, List[str]]], NoneType], required=False, default=None), 'string_selection': FieldInfo(annotation=Union[List[int], NoneType], required=False, default=None), 'truth': FieldInfo(annotation=List[str], required=True), 'truth_table': FieldInfo(annotation=str, required=False, default='truth')}

    Metadata about the fields defined on the model, mapping of field names to [FieldInfo][pydantic.fields.FieldInfo].

    This replaces Model.__fields__ from Pydantic V1.
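With the explicit default=None values introduced above, only the four required fields must be supplied when building a DatasetConfig. A minimal sketch of that (the import path and all example values are assumptions; only the field names, types, and defaults come from the model_fields entry above):

    from graphnet.utilities.config import DatasetConfig  # import path assumed

    # Only the four required fields are passed; every optional field falls
    # back to the defaults recorded in model_fields above.
    config = DatasetConfig(
        path="/data/train.db",                 # required, str | List[str]
        pulsemaps="pulses",                    # required; pulsemap name illustrative
        features=["dom_x", "dom_y", "dom_z"],  # required, List[str]
        truth=["energy", "zenith"],            # required, List[str]
    )
    assert config.index_column == "event_no"  # explicit non-None default
    assert config.seed is None                # optional fields now default to None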

    @@ -966,7 +1000,7 @@

    Callable

 Parameters:
-init_fn (Callable) –
+init_fn (Callable)

@@ -1031,7 +1065,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.config.html b/api/graphnet.utilities.config.html
index 88ea67d59..12c82520e 100644
--- a/api/graphnet.utilities.config.html
+++ b/api/graphnet.utilities.config.html
@@ -123,10 +123,9 @@ [page head assets updated; markup lost in extraction]
@@ -285,14 +284,42 @@ [sidebar navigation]
-• Install
+• Installation
-• Contribute
+• Models In GraphNeT
+• Datasets In GraphNeT
+• Data Conversion in GraphNeT
+• Integrating New Experiments into GraphNeT
+• Contributing To GraphNeT
@@ -307,6 +334,13 @@
 • data
+• datasets
@@ -318,14 +352,14 @@
-• models
+• exceptions
-• pisa
+• models
@@ -574,7 +608,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.config.model_config.html b/api/graphnet.utilities.config.model_config.html
index 39894fd70..1b5d5a3ed 100644
--- a/api/graphnet.utilities.config.model_config.html
+++ b/api/graphnet.utilities.config.model_config.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -286,14 +285,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]

@@ -636,8 +670,8 @@
 Parameters:
-• class_name (str) –
-• arguments (Dict[str, Any]) –
+• class_name (str)
+• arguments (Dict[str, Any])
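A ModelConfig therefore reduces to a class name plus the keyword arguments needed to re-instantiate that class. A hedged sketch (the import path, class, and argument values are illustrative, not taken from this patch):

    from graphnet.utilities.config import ModelConfig  # import path assumed

    # A model is captured as "which class" plus "which constructor arguments",
    # so it can be re-built later from the config alone.
    config = ModelConfig(
        class_name="DynEdge",        # example model class name (illustrative)
        arguments={"nb_inputs": 7},  # Dict[str, Any]; keys are illustrative
    )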

    @@ -689,7 +723,7 @@

    Callable

 Parameters:
-init_fn (Callable) –
+init_fn (Callable)

@@ -754,7 +788,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.config.parsing.html b/api/graphnet.utilities.config.parsing.html
index b2100d068..81328ad85 100644
--- a/api/graphnet.utilities.config.parsing.html
+++ b/api/graphnet.utilities.config.parsing.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -286,14 +285,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]

@@ -571,9 +605,9 @@
 Parameters:
 [list items unchanged apart from the removed trailing "–"]

@@ -587,7 +621,7 @@
 List[ModuleType]
 Parameters:
-packages (module) –
+packages (ModuleType)

@@ -600,7 +634,7 @@
 Dict[str, type]
 Parameters:
-packages (module) –
+packages (ModuleType)

@@ -613,7 +647,7 @@
 bool
 Parameters:
-obj (module) –
+obj (ModuleType)

@@ -626,7 +660,7 @@
 bool
 Parameters:
-obj (type) –
+obj (type)

@@ -639,7 +673,7 @@
 Dict[str, type]
 Parameters:
-module (module) –
+module (ModuleType)
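The hunks above only tighten annotations from the generic module to types.ModuleType; the functions' names sit in unchanged context and are not part of the diff. As a generic sketch of the kind of module introspection these signatures describe (not GraphNeT's actual implementation):

    import inspect
    from types import ModuleType
    from typing import Dict

    def classes_in_module(module: ModuleType) -> Dict[str, type]:
        # Map class names to classes defined directly in `module`,
        # skipping anything merely imported into it.
        return {
            name: obj
            for name, obj in inspect.getmembers(module, inspect.isclass)
            if obj.__module__ == module.__name__
        }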

@@ -692,7 +726,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.config.training_config.html b/api/graphnet.utilities.config.training_config.html
index fb1647bf3..e17e2faf3 100644
--- a/api/graphnet.utilities.config.training_config.html
+++ b/api/graphnet.utilities.config.training_config.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -286,14 +285,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]

@@ -599,10 +633,10 @@
 Parameters:
-• target (str | List[str]) –
-• early_stopping_patience (int) –
-• fit (Dict[str, Any]) –
-• dataloader (Dict[str, Any]) –
+• target (str | List[str])
+• early_stopping_patience (int)
+• fit (Dict[str, Any])
+• dataloader (Dict[str, Any])
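The four fields above make up a TrainingConfig. A hedged construction sketch (the import path and the contents of the fit/dataloader dictionaries are assumptions):

    from graphnet.utilities.config import TrainingConfig  # import path assumed

    config = TrainingConfig(
        target="energy",                 # str | List[str]
        early_stopping_patience=5,       # int
        fit={"max_epochs": 50},          # Dict[str, Any]; keys illustrative
        dataloader={"batch_size": 128},  # Dict[str, Any]; keys illustrative
    )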

@@ -689,7 +723,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.decorators.html b/api/graphnet.utilities.decorators.html
index 854e94e51..65f3bd085 100644
--- a/api/graphnet.utilities.decorators.html
+++ b/api/graphnet.utilities.decorators.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -491,7 +525,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.deprecation_tools.html b/api/graphnet.utilities.deprecation_tools.html
index 4d0500613..c15157044 100644
--- a/api/graphnet.utilities.deprecation_tools.html
+++ b/api/graphnet.utilities.deprecation_tools.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -530,7 +564,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.filesys.html b/api/graphnet.utilities.filesys.html
index d2965bd8b..2dd5ba95a 100644
--- a/api/graphnet.utilities.filesys.html
+++ b/api/graphnet.utilities.filesys.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -505,7 +539,7 @@
 bool
 Parameters:
-filename (str) –
+filename (str)

@@ -518,7 +552,7 @@
 bool
 Parameters:
-filename (str) –
+filename (str)

@@ -532,8 +566,8 @@
 Parameters:
 [list items unchanged apart from the removed trailing "–"]
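Both functions above are bool-returning checks on a single filename. A generic sketch of such a file-type predicate (the function name and the extension logic are assumptions, not this module's actual code):

    import os

    def looks_like_i3_file(filename: str) -> bool:
        # Heuristic: treat ".i3" anywhere in the basename as an IceCube file,
        # which also catches compressed variants such as .i3.gz (assumption).
        return ".i3" in os.path.basename(filename).lower()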
@@ -612,7 +646,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.html b/api/graphnet.utilities.html
index a587baa9c..328307d19 100644
--- a/api/graphnet.utilities.html
+++ b/api/graphnet.utilities.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -284,14 +283,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -478,7 +512,6 @@
 • imports
[one sidebar list entry removed in this hunk]
@@ -542,7 +575,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.imports.html b/api/graphnet.utilities.imports.html
index c91150b43..914c0993c 100644
--- a/api/graphnet.utilities.imports.html
+++ b/api/graphnet.utilities.imports.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -394,8 +428,6 @@
 • has_torch_package()
-• has_pisa_package()
 • requires_icecube()
@@ -416,13 +448,6 @@
 has_torch_package()
-has_pisa_package()
@@ -478,8 +503,6 @@
 • has_torch_package()
-• has_pisa_package()
 • requires_icecube()

@@ -517,16 +540,6 @@
-graphnet.utilities.imports.has_pisa_package()[source]
-Check whether the pisa package is available.
-Return type:
-bool
    graphnet.utilities.imports.requires_icecube(test_function)[source]

    Decorate test_function for use only if icecube module is present.

@@ -535,7 +548,7 @@
 Callable
 Parameters:
-test_function (Callable) –
+test_function (Callable)
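After this cleanup the module keeps has_torch_package() and requires_icecube() while has_pisa_package() is gone. A hedged usage sketch (the test body and test name are illustrative; the two imported helpers are named on this page):

    from graphnet.utilities.imports import has_torch_package, requires_icecube

    @requires_icecube  # runs the test only when the icecube module is present
    def test_icecube_specific_behaviour() -> None:  # test name illustrative
        ...

    if has_torch_package():
        import torch  # torch-dependent code paths are safe here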

@@ -588,7 +601,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.logging.html b/api/graphnet.utilities.logging.html
index af5990025..7357510b4 100644
--- a/api/graphnet.utilities.logging.html
+++ b/api/graphnet.utilities.logging.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -635,7 +669,7 @@
 bool
 Parameters:
-record (LogRecord) –
+record (LogRecord)

@@ -654,11 +688,11 @@
 Parameters:
-• name (str | None) –
-• class_name (str | None) –
-• level (int) –
-• log_folder (str | None) –
-• kwargs (Any) –
+• name (str | None)
+• class_name (str | None)
+• level (int)
+• log_folder (str | None)
+• kwargs (Any)

@@ -671,7 +705,7 @@
 None
 Parameters:
-level (int) –
+level (int)

@@ -685,9 +719,9 @@
 Parameters: [items unchanged apart from the removed trailing "–"]
@@ -702,9 +736,9 @@
 Parameters: [items unchanged apart from the removed trailing "–"]
@@ -719,9 +753,9 @@
 Parameters: [items unchanged apart from the removed trailing "–"]
@@ -736,9 +770,9 @@
 Parameters: [items unchanged apart from the removed trailing "–"]
@@ -753,9 +787,9 @@
 Parameters: [items unchanged apart from the removed trailing "–"]

@@ -769,7 +803,7 @@
 None
 Parameters:
-msg (str) –
+msg (str)
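The five-argument constructor hunk above (name, class_name, level, log_folder, kwargs) and the msg (str) method hunks belong to the logging wrapper documented on this page. A hedged sketch (the class name Logger and the method names are assumed from this page's context):

    import logging
    from graphnet.utilities.logging import Logger  # class name assumed

    logger = Logger(
        name="training",      # str | None
        level=logging.INFO,   # int
        log_folder="./logs",  # str | None; where file logs go (assumption)
    )
    logger.info("Starting fit")  # msg (str) -> None, matching the hunks above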

@@ -838,7 +872,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/graphnet.utilities.maths.html b/api/graphnet.utilities.maths.html
index cfe0b90e2..45e2a8f40 100644
--- a/api/graphnet.utilities.maths.html
+++ b/api/graphnet.utilities.maths.html
@@ -123,10 +123,9 @@ [page head assets updated]
@@ -285,14 +284,42 @@ [sidebar navigation]
[sidebar navigation hunks identical to those in api/graphnet.utilities.config.html above]
@@ -472,7 +506,7 @@
 Tensor
 Parameters:
-tensor (Tensor) –
+tensor (Tensor)

@@ -525,7 +559,7 @@
 Created using
-Sphinx 7.2.6.
+Sphinx 7.3.7.
 and Material for Sphinx

diff --git a/api/modules.html b/api/modules.html
index 11cc56b2f..eb8ad7c85 100644
--- a/api/modules.html
+++ b/api/modules.html
@@ -123,10 +123,10 @@ [page head assets updated]
@@ -281,14 +281,42 @@ [sidebar navigation]
[sidebar navigation: the same Install → Installation / Contribute → guide-entries hunk as in the files above]

@@ -327,9 +355,10 @@

 src
 API