merge changes from main
Merge branch 'main' into 106-outlying-photometry-checker
jrob93 committed May 13, 2024
2 parents 5149651 + 3ecce45 commit e542832
Showing 17 changed files with 611 additions and 150 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/smoke-test.yml
@@ -20,7 +20,7 @@ jobs:
strategy:
matrix:
os: ['macos-latest','ubuntu-latest']
python-version: ['3.9', '3.10', '3.11']
python-version: ['3.10', '3.11', '3.12']

runs-on: ${{ matrix.os }}
steps:
2 changes: 1 addition & 1 deletion .github/workflows/testing-and-coverage.yml
@@ -15,7 +15,7 @@ jobs:
strategy:
matrix:
os: ['macos-latest','ubuntu-latest']
python-version: ['3.9', '3.10', '3.11']
python-version: ['3.10', '3.11', '3.12']
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
57 changes: 48 additions & 9 deletions src/adler/adler.py
@@ -1,19 +1,35 @@
import logging
import argparse
import astropy.units as u

from adler.dataclasses.AdlerPlanetoid import AdlerPlanetoid
from adler.science.PhaseCurve import PhaseCurve
from adler.utilities.AdlerCLIArguments import AdlerCLIArguments
from adler.utilities.adler_logging import setup_adler_logging

logger = logging.getLogger(__name__)

def runAdler(args):
planetoid = AdlerPlanetoid.construct_from_RSP(args.ssoid, args.filter_list, args.date_range)

def runAdler(cli_args):
logger.info("Beginning Adler.")
logger.info("Ingesting all data for object {} from RSP...".format(cli_args.ssObjectId))

planetoid = AdlerPlanetoid.construct_from_RSP(
cli_args.ssObjectId, cli_args.filter_list, cli_args.date_range
)

logger.info("Data successfully ingested.")
logger.info("Calculating phase curves...")

# now let's do some phase curves!

# get the r filter SSObject metadata
sso_r = planetoid.SSObject_in_filter("r")

# get the RSP r filter model
pc = PhaseCurve(
abs_mag=planetoid.SSObject.H[2] * u.mag,
phase_param=planetoid.SSObject.G12[2],
abs_mag=sso_r.H * u.mag,
phase_param=sso_r.G12,
model_name="HG12_Pen16",
)
print(pc)
@@ -31,11 +47,16 @@ def runAdler(args):


def main():
parser = argparse.ArgumentParser(description="Runs Adler for a select planetoid and given user input.")
parser = argparse.ArgumentParser(description="Runs Adler for select planetoid(s) and given user input.")

parser.add_argument("-s", "--ssoid", help="SSObject ID of planetoid.", type=str, required=True)
parser.add_argument("-s", "--ssObjectId", help="SSObject ID of planetoid.", type=str, required=True)
parser.add_argument(
"-f", "--filters", help="Comma-separated list of filters required.", type=str, default="u,g,r,i,z,y"
"-f",
"--filter_list",
help="Filters required.",
nargs="*",
type=str,
default=["u", "g", "r", "i", "z", "y"],
)
parser.add_argument(
"-d",
@@ -45,12 +66,30 @@ def main():
type=float,
default=[60000.0, 67300.0],
)
parser.add_argument(
"-o",
"--outpath",
help="Output path location. Default is current working directory.",
type=str,
default="./",
)
parser.add_argument(
"-n",
"--db_name",
help="Stem filename of output database. If this doesn't exist, it will be created. Default: adler_out.",
type=str,
default="adler_out",
)

args = parser.parse_args()

args.filter_list = args.filters.split(",")
cli_args = AdlerCLIArguments(args)

adler_logger = setup_adler_logging(cli_args.outpath)

cli_args.logger = adler_logger

runAdler(args)
runAdler(cli_args)


if __name__ == "__main__":
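
The CLI changes above replace the comma-separated --filters string (previously parsed with args.filters.split(",")) with an nargs-based --filter_list argument, so filters are now passed space-separated on the command line. A minimal standalone argparse sketch, not part of this commit, with the argument definition copied from the diff above, illustrates the new behaviour:

import argparse

# Sketch of the new filter_list argument: nargs="*" collects the filters
# directly into a list, so no manual split(",") step is needed any more.
parser = argparse.ArgumentParser(description="Standalone sketch of the updated filter argument.")
parser.add_argument(
    "-f",
    "--filter_list",
    help="Filters required.",
    nargs="*",
    type=str,
    default=["u", "g", "r", "i", "z", "y"],
)

# Old usage: -f "u,g,r" followed by args.filters.split(",").
# New usage: -f u g r
args = parser.parse_args(["-f", "u", "g", "r"])
print(args.filter_list)  # ['u', 'g', 'r']
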
12 changes: 12 additions & 0 deletions src/adler/dataclasses/AdlerData.py
@@ -1,5 +1,6 @@
import os
import sqlite3
import logging
import numpy as np
from dataclasses import dataclass, field
from datetime import datetime, timezone
@@ -15,6 +16,8 @@
"phase_parameter_2_err",
]

logger = logging.getLogger(__name__)


@dataclass
class AdlerData:
@@ -70,10 +73,12 @@ def populate_phase_parameters(self, filter_name, **kwargs):
try:
filter_index = self.filter_list.index(filter_name)
except ValueError:
logger.error("ValueError: Filter {} does not exist in AdlerData.filter_list.".format(filter_name))
raise ValueError("Filter {} does not exist in AdlerData.filter_list.".format(filter_name))

# if model-dependent parameters exist without a model name, return an error
if not kwargs.get("model_name") and any(name in kwargs for name in MODEL_DEPENDENT_KEYS):
logger.error("NameError: No model name given. Cannot update model-specific phase parameters.")
raise NameError("No model name given. Cannot update model-specific phase parameters.")

# update the value if it's in **kwargs
@@ -163,6 +168,7 @@ def get_phase_parameters_in_filter(self, filter_name, model_name=None):
try:
filter_index = self.filter_list.index(filter_name)
except ValueError:
logger.error("ValueError: Filter {} does not exist in AdlerData.filter_list.".format(filter_name))
raise ValueError("Filter {} does not exist in AdlerData.filter_list.".format(filter_name))

output_obj = PhaseParameterOutput()
@@ -173,11 +179,17 @@ def get_phase_parameters_in_filter(self, filter_name, model_name=None):
output_obj.arc = self.filter_dependent_values[filter_index].arc

if not model_name:
logger.warn("No model name was specified. Returning non-model-dependent phase parameters.")
print("No model name specified. Returning non-model-dependent phase parameters.")
else:
try:
model_index = self.filter_dependent_values[filter_index].model_list.index(model_name)
except ValueError:
logger.error(
"ValueError: Model {} does not exist for filter {} in AdlerData.model_lists.".format(
model_name, filter_name
)
)
raise ValueError(
"Model {} does not exist for filter {} in AdlerData.model_lists.".format(
model_name, filter_name
66 changes: 63 additions & 3 deletions src/adler/dataclasses/AdlerPlanetoid.py
@@ -1,12 +1,15 @@
from lsst.rsp import get_tap_service
import pandas as pd
import logging

from adler.dataclasses.Observations import Observations
from adler.dataclasses.MPCORB import MPCORB
from adler.dataclasses.SSObject import SSObject
from adler.dataclasses.AdlerData import AdlerData
from adler.dataclasses.dataclass_utilities import get_data_table

logger = logging.getLogger(__name__)


class AdlerPlanetoid:
"""AdlerPlanetoid class. Contains the Observations, MPCORB and SSObject dataclass objects."""
@@ -80,12 +83,28 @@ def construct_from_SQL(
"""

if len(date_range) != 2:
logger.error("ValueError: date_range attribute must be of length 2.")
raise ValueError("date_range attribute must be of length 2.")

observations_by_filter = cls.populate_observations(
cls, ssObjectId, filter_list, date_range, sql_filename=sql_filename, schema=schema
)

if len(observations_by_filter) == 0:
logger.error(
"No observations found for this object in the given filter(s). Check SSOID and try again."
)
raise Exception(
"No observations found for this object in the given filter(s). Check SSOID and try again."
)

if len(filter_list) > len(observations_by_filter):
logger.info(
"Not all specified filters have observations. Recalculating filter list based on past observations."
)
filter_list = [obs_object.filter_name for obs_object in observations_by_filter]
logger.info("New filter list is: {}".format(filter_list))

mpcorb = cls.populate_MPCORB(cls, ssObjectId, sql_filename=sql_filename, schema=schema)
ssobject = cls.populate_SSObject(
cls, ssObjectId, filter_list, sql_filename=sql_filename, schema=schema
@@ -119,10 +138,29 @@ def construct_from_RSP(
raise Exception("date_range argument must be of length 2.")

service = get_tap_service("ssotap")
logger.info("Getting past observations from DIASource/SSSource...")
observations_by_filter = cls.populate_observations(
cls, ssObjectId, filter_list, date_range, service=service
)

if len(observations_by_filter) == 0:
logger.error(
"No observations found for this object in the given filter(s). Check SSOID and try again."
)
raise Exception(
"No observations found for this object in the given filter(s). Check SSOID and try again."
)

if len(filter_list) > len(observations_by_filter):
logger.info(
"Not all specified filters have observations. Recalculating filter list based on past observations."
)
filter_list = [obs_object.filter_name for obs_object in observations_by_filter]
logger.info("New filter list is: {}".format(filter_list))

logger.info("Populating MPCORB metadata...")
mpcorb = cls.populate_MPCORB(cls, ssObjectId, service=service)
logger.info("Populating SSObject metadata...")
ssobject = cls.populate_SSObject(cls, ssObjectId, filter_list, service=service)

adler_data = AdlerData(ssObjectId, filter_list)
@@ -185,9 +223,21 @@ def populate_observations(

data_table = get_data_table(observations_sql_query, service=service, sql_filename=sql_filename)

observations_by_filter.append(
Observations.construct_from_data_table(ssObjectId, filter_name, data_table)
)
if len(data_table) == 0:
logger.warning(
"No observations found in {} filter for this object. Skipping this filter.".format(
filter_name
)
)
print(
"WARNING: No observations found in {} filter for this object. Skipping this filter.".format(
filter_name
)
)
else:
observations_by_filter.append(
Observations.construct_from_data_table(ssObjectId, filter_name, data_table)
)

return observations_by_filter

@@ -228,6 +278,10 @@ def populate_MPCORB(self, ssObjectId, service=None, sql_filename=None, schema="d

data_table = get_data_table(MPCORB_sql_query, service=service, sql_filename=sql_filename)

if len(data_table) == 0:
logger.error("No MPCORB data for this object could be found for this SSObjectId.")
raise Exception("No MPCORB data for this object could be found for this SSObjectId.")

return MPCORB.construct_from_data_table(ssObjectId, data_table)

def populate_SSObject(
@@ -282,6 +336,10 @@ def populate_SSObject(

data_table = get_data_table(SSObject_sql_query, service=service, sql_filename=sql_filename)

if len(data_table) == 0:
logger.error("No SSObject data for this object could be found for this SSObjectId.")
raise Exception("No SSObject data for this object could be found for this SSObjectId.")

return SSObject.construct_from_data_table(ssObjectId, filter_list, data_table)

def observations_in_filter(self, filter_name):
@@ -302,6 +360,7 @@ def observations_in_filter(self, filter_name):
try:
filter_index = self.filter_list.index(filter_name)
except ValueError:
logger.error("ValueError: Filter {} is not in AdlerPlanetoid.filter_list.".format(filter_name))
raise ValueError("Filter {} is not in AdlerPlanetoid.filter_list.".format(filter_name))

return self.observations_by_filter[filter_index]
@@ -324,6 +383,7 @@ def SSObject_in_filter(self, filter_name):
try:
filter_index = self.filter_list.index(filter_name)
except ValueError:
logger.error("ValueError: Filter {} is not in AdlerPlanetoid.filter_list.".format(filter_name))
raise ValueError("Filter {} is not in AdlerPlanetoid.filter_list.".format(filter_name))

return self.SSObject.filter_dependent_values[filter_index]
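
Both constructors now bail out when no observations are returned at all, and shrink the working filter list when some of the requested filters came back empty. A self-contained sketch of that filter-list recalculation, using a hypothetical Obs placeholder class rather than the package's Observations dataclass, shows the logic in isolation:

import logging
from dataclasses import dataclass

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

@dataclass
class Obs:
    # Hypothetical stand-in for an Observations object; only filter_name matters here.
    filter_name: str

def recalculate_filter_list(filter_list, observations_by_filter):
    """Mirror of the guard clauses in the diff above: fail on no data, drop empty filters."""
    if len(observations_by_filter) == 0:
        logger.error("No observations found for this object in the given filter(s).")
        raise Exception("No observations found for this object in the given filter(s).")

    if len(filter_list) > len(observations_by_filter):
        logger.info("Not all specified filters have observations. Recalculating filter list.")
        filter_list = [obs.filter_name for obs in observations_by_filter]
        logger.info("New filter list is: {}".format(filter_list))

    return filter_list

# Example: the u filter returned no observations, so it is dropped.
print(recalculate_filter_list(["u", "g", "r"], [Obs("g"), Obs("r")]))  # ['g', 'r']
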
52 changes: 22 additions & 30 deletions src/adler/dataclasses/MPCORB.py
@@ -2,6 +2,22 @@

from adler.dataclasses.dataclass_utilities import get_from_table

MPCORB_KEYS = {
"mpcDesignation": str,
"mpcNumber": int,
"mpcH": float,
"mpcG": float,
"epoch": float,
"peri": float,
"node": float,
"incl": float,
"e": float,
"n": float,
"q": float,
"uncertaintyParameter": str,
"flags": str,
}


@dataclass
class MPCORB:
@@ -87,33 +103,9 @@ def construct_from_data_table(cls, ssObjectId, data_table):
"""

mpcDesignation = get_from_table(data_table, "mpcDesignation", "str")
mpcNumber = get_from_table(data_table, "mpcNumber", "int")
mpcH = get_from_table(data_table, "mpcH", "float")
mpcG = get_from_table(data_table, "mpcG", "float")
epoch = get_from_table(data_table, "epoch", "float")
peri = get_from_table(data_table, "peri", "float")
node = get_from_table(data_table, "node", "float")
incl = get_from_table(data_table, "incl", "float")
e = get_from_table(data_table, "e", "float")
n = get_from_table(data_table, "n", "float")
q = get_from_table(data_table, "q", "float")
uncertaintyParameter = get_from_table(data_table, "uncertaintyParameter", "str")
flags = get_from_table(data_table, "flags", "str")

return cls(
ssObjectId,
mpcDesignation,
mpcNumber,
mpcH,
mpcG,
epoch,
peri,
node,
incl,
e,
n,
q,
uncertaintyParameter,
flags,
)
mpcorb_dict = {"ssObjectId": ssObjectId}

for mpcorb_key, mpcorb_type in MPCORB_KEYS.items():
mpcorb_dict[mpcorb_key] = get_from_table(data_table, mpcorb_key, mpcorb_type, "MPCORB")

return cls(**mpcorb_dict)
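
The rewritten construct_from_data_table replaces thirteen near-identical get_from_table calls with a single loop over the MPCORB_KEYS mapping of column names to types. A generic, self-contained sketch of the same dict-driven pattern, using a toy get_from_table stand-in rather than the adler utility function, looks like this:

from dataclasses import dataclass

# Column name to expected type, analogous to the MPCORB_KEYS mapping above.
RECORD_KEYS = {"designation": str, "h_mag": float, "number": int}

@dataclass
class Record:
    ssObjectId: str
    designation: str
    h_mag: float
    number: int

def get_from_table(row, key, cast_to):
    # Toy stand-in: pull a value out of a dict-like row and cast it to the expected type.
    return cast_to(row[key])

def construct_record(ssObjectId, row):
    # One loop over the key/type mapping replaces a separate lookup line per column.
    record_dict = {"ssObjectId": ssObjectId}
    for key, cast_to in RECORD_KEYS.items():
        record_dict[key] = get_from_table(row, key, cast_to)
    return Record(**record_dict)

print(construct_record("example-id", {"designation": "example", "h_mag": "19.6", "number": "42"}))
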