
Commit

Merge branch 'main' into RNN_Attention
merge from main
Aske-Rosted committed Feb 16, 2024
2 parents 448674f + 2adc935 commit db5eb2c
Showing 56 changed files with 1,449 additions and 1,229 deletions.
30 changes: 18 additions & 12 deletions examples/01_icetray/01_convert_i3_files.py
@@ -1,9 +1,10 @@
"""Example of converting I3-files to SQLite and Parquet."""

import os
from glob import glob

from graphnet.constants import EXAMPLE_OUTPUT_DIR, TEST_DATA_DIR
-from graphnet.data.extractors import (
+from graphnet.data.extractors.icecube import (
I3FeatureExtractorIceCubeUpgrade,
I3FeatureExtractorIceCube86,
I3RetroExtractor,
@@ -41,17 +42,22 @@ def main_icecube86(backend: str) -> None:

inputs = [f"{TEST_DATA_DIR}/i3/oscNext_genie_level7_v02"]
outdir = f"{EXAMPLE_OUTPUT_DIR}/convert_i3_files/ic86"
gcd_rescue = glob(
f"{TEST_DATA_DIR}/i3/oscNext_genie_level7_v02/*GeoCalib*"
)[0]

-converter: DataConverter = CONVERTER_CLASS[backend](
-[
+converter = CONVERTER_CLASS[backend](
+extractors=[
I3FeatureExtractorIceCube86("SRTInIcePulses"),
I3TruthExtractor(),
],
-outdir,
+outdir=outdir,
gcd_rescue=gcd_rescue,
workers=1,
)
converter(inputs)
if backend == "sqlite":
-converter.merge_files(os.path.join(outdir, "merged"))
+converter.merge_files()


def main_icecube_upgrade(backend: str) -> None:
@@ -61,25 +67,25 @@ def main_icecube_upgrade(backend: str) -> None:

inputs = [f"{TEST_DATA_DIR}/i3/upgrade_genie_step4_140028_000998"]
outdir = f"{EXAMPLE_OUTPUT_DIR}/convert_i3_files/upgrade"
gcd_rescue = glob(
f"{TEST_DATA_DIR}/i3/upgrade_genie_step4_140028_000998/*GeoCalib*"
)[0]
workers = 1

converter: DataConverter = CONVERTER_CLASS[backend](
-[
+extractors=[
I3TruthExtractor(),
I3RetroExtractor(),
I3FeatureExtractorIceCubeUpgrade("I3RecoPulseSeriesMap_mDOM"),
I3FeatureExtractorIceCubeUpgrade("I3RecoPulseSeriesMap_DEgg"),
],
-outdir,
+outdir=outdir,
workers=workers,
# nb_files_to_batch=10,
# sequential_batch_pattern="temp_{:03d}",
# input_file_batch_pattern="[A-Z]{1}_[0-9]{5}*.i3.zst",
icetray_verbose=1,
gcd_rescue=gcd_rescue,
)
converter(inputs)
if backend == "sqlite":
-converter.merge_files(os.path.join(outdir, "merged"))
+converter.merge_files()


if __name__ == "__main__":
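
Taken together, the changes to this first example amount to a new calling convention: extractors are passed by keyword, a GCD rescue file is located with glob and passed as gcd_rescue, and merge_files() no longer takes an output path. The sketch below restates the IceCube-86 branch of the example as one self-contained snippet. It assumes, purely for illustration, that the pre-configured I3ToSQLiteConverter / I3ToParquetConverter re-exported from graphnet.data (see the __init__.py diff further down) can stand in for the CONVERTER_CLASS mapping defined earlier in the example script, which this hunk does not show.

from glob import glob

from graphnet.constants import EXAMPLE_OUTPUT_DIR, TEST_DATA_DIR
from graphnet.data import I3ToParquetConverter, I3ToSQLiteConverter
from graphnet.data.extractors.icecube import (
    I3FeatureExtractorIceCube86,
    I3TruthExtractor,
)

# Assumption: the example's CONVERTER_CLASS maps backend names to the
# pre-configured converters re-exported from graphnet.data.
CONVERTER_CLASS = {"sqlite": I3ToSQLiteConverter, "parquet": I3ToParquetConverter}

backend = "sqlite"
inputs = [f"{TEST_DATA_DIR}/i3/oscNext_genie_level7_v02"]
outdir = f"{EXAMPLE_OUTPUT_DIR}/convert_i3_files/ic86"
gcd_rescue = glob(f"{TEST_DATA_DIR}/i3/oscNext_genie_level7_v02/*GeoCalib*")[0]

converter = CONVERTER_CLASS[backend](
    extractors=[
        I3FeatureExtractorIceCube86("SRTInIcePulses"),
        I3TruthExtractor(),
    ],
    outdir=outdir,
    gcd_rescue=gcd_rescue,  # fallback GCD file for I3 files that lack one
    workers=1,
)
converter(inputs)  # run the conversion over the input directories
if backend == "sqlite":
    converter.merge_files()  # merged-output location is now chosen by the converter
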
2 changes: 1 addition & 1 deletion examples/01_icetray/02_compare_sqlite_and_parquet.py
@@ -7,7 +7,7 @@
from graphnet.data.sqlite import SQLiteDataConverter
from graphnet.data.parquet import ParquetDataConverter
from graphnet.data.dataset import SQLiteDataset, ParquetDataset
-from graphnet.data.extractors import (
+from graphnet.data.extractors.icecube import (
I3FeatureExtractorIceCube86,
I3TruthExtractor,
I3RetroExtractor,
2 changes: 1 addition & 1 deletion examples/01_icetray/03_i3_deployer_example.py
@@ -10,7 +10,7 @@
PRETRAINED_MODEL_DIR,
)
from graphnet.data.constants import FEATURES, TRUTH
-from graphnet.data.extractors.i3featureextractor import (
+from graphnet.data.extractors.icecube import (
I3FeatureExtractorIceCubeUpgrade,
)
from graphnet.utilities.argparse import ArgumentParser
@@ -5,7 +5,7 @@
from typing import TYPE_CHECKING, List, Sequence

from graphnet.data.constants import FEATURES
-from graphnet.data.extractors.i3featureextractor import (
+from graphnet.data.extractors.icecube import (
I3FeatureExtractorIceCubeUpgrade,
)
from graphnet.constants import (
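
The remaining example and deployment files above all make the same mechanical change: the IceCube-specific extractors (and the old i3featureextractor module) now live under an icecube subpackage. A minimal before/after sketch, using only names that appear in the hunks above:

# Before: imported from the package root or from the i3featureextractor module.
# from graphnet.data.extractors import I3FeatureExtractorIceCube86
# from graphnet.data.extractors.i3featureextractor import I3FeatureExtractorIceCubeUpgrade

# After: grouped under the IceCube-specific subpackage.
from graphnet.data.extractors.icecube import (
    I3FeatureExtractorIceCube86,
    I3FeatureExtractorIceCubeUpgrade,
    I3RetroExtractor,
    I3TruthExtractor,
)
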
7 changes: 5 additions & 2 deletions src/graphnet/data/__init__.py
@@ -1,6 +1,9 @@
"""Modules for converting and ingesting data.
`graphnet.data` enables converting domain-specific data to industry-standard,
intermediate file formats and reading this data.
"""
-from .filters import I3Filter, I3FilterMask
+from .extractors.icecube.utilities.i3_filters import I3Filter, I3FilterMask
from .dataconverter import DataConverter
from .pre_configured import I3ToParquetConverter
from .pre_configured import I3ToSQLiteConverter
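
With these lines, the package root re-exports the I3 filters, the DataConverter base class, and the two pre-configured converters, so downstream code can import them from graphnet.data directly, as in the sketch below.

from graphnet.data import (
    DataConverter,
    I3Filter,
    I3FilterMask,
    I3ToParquetConverter,
    I3ToSQLiteConverter,
)
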
10 changes: 10 additions & 0 deletions src/graphnet/data/dataclasses.py
@@ -0,0 +1,10 @@
"""Module containing experiment-specific dataclasses."""


from dataclasses import dataclass


@dataclass
class I3FileSet: # noqa: D101
i3_file: str
gcd_file: str
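
I3FileSet is a plain dataclass pairing a physics I3 file with its GCD (geometry and calibration) file. A short usage sketch; the paths below are placeholders, not files shipped with the repository:

from graphnet.data.dataclasses import I3FileSet

# Placeholder paths, for illustration only.
fileset = I3FileSet(
    i3_file="/data/i3/oscNext_genie_level7_v02/example_file.i3.zst",
    gcd_file="/data/i3/GeoCalibDetectorStatus_example.i3.gz",
)
print(fileset.i3_file, fileset.gcd_file)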