Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix orbit cli types #145

Merged
merged 30 commits into from
Dec 1, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
8146a6e
Issue/123/kbo neo issue (#134)
FusRoman Aug 4, 2023
1e840a4
add complexity plots and the associated data (#139)
FusRoman Aug 8, 2023
41dbd23
fix orbit cli types
FusRoman Sep 4, 2023
1d7aed7
add kalman assoc command line
FusRoman Sep 4, 2023
8eeb9f4
working on the cli
FusRoman Sep 4, 2023
f6d8964
fix kalman filter, improve performance and fix bug in the kalman ci, …
FusRoman Sep 6, 2023
033da8c
pep8
FusRoman Sep 6, 2023
135783f
fix predict bug
FusRoman Sep 6, 2023
4583d36
add functionality for roid fitting using polynomial function
FusRoman Oct 31, 2023
9f47ad9
small refactoring
FusRoman Nov 2, 2023
572b591
small fix on the 1.0
FusRoman Nov 20, 2023
f442532
rename notebook into fink_fat_notebook
FusRoman Nov 20, 2023
300b05c
update notebooks for the amateur
FusRoman Nov 27, 2023
5734734
Issue/147/inverse tags (#148)
FusRoman Nov 27, 2023
b0183cd
merge from main
FusRoman Nov 27, 2023
193b841
pep8
FusRoman Nov 27, 2023
1cd986f
Issue/146/rename notebook dir (#150)
FusRoman Nov 27, 2023
365a5a6
[HOTFIX] update gitignore
FusRoman Nov 27, 2023
6e9639b
merge from main
FusRoman Nov 27, 2023
ab80add
Issue/152/fix orbfit params (#153)
FusRoman Nov 29, 2023
857d11b
Issue/155/orbfit timeout (#156)
FusRoman Nov 29, 2023
23a6dc2
Minimal extension for external files (#154)
JulienPeloton Nov 29, 2023
72882fd
merge main
FusRoman Nov 29, 2023
e0e57d6
remove bin dir
FusRoman Nov 29, 2023
5d02d9b
bump to 1.0.0
FusRoman Nov 29, 2023
e897f77
add associations between the new alerts from the stream and the traje…
FusRoman Nov 30, 2023
787ea62
pep8
FusRoman Nov 30, 2023
73ab925
fix kalman test
FusRoman Nov 30, 2023
4234b5e
small fix
FusRoman Dec 1, 2023
428705a
fix assoc stream test
FusRoman Dec 1, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
6 changes: 3 additions & 3 deletions .flake8
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@ per-file-ignores =
../fink-fat/fink_fat/orbit_fitting/orbfit_merger.py:W503
../fink-fat/fink_fat/command_line/fink_fat_cli.py:W503
../fink-fat/fink_fat/test/test_sample.py:F601
../fink-fat/notebook/results_analysis/utils.py:W503
../fink-fat/notebook/parameters_selection/exploring_script.py:E721
../fink-fat/notebook/follow_up/ephem_association.py:W503
../fink-fat/fink_fat_notebook/results_analysis/utils.py:W503
../fink-fat/fink_fat_notebook/parameters_selection/exploring_script.py:E721
../fink-fat/fink_fat_notebook/follow_up/ephem_association.py:W503
../fink-fat/fink_fat/seeding/dbscan_seeding.py:W503
../fink-fat/fink_fat/command_line/cli_main/stats.py:W503
../fink-fat/fink_fat/kalman/asteroid_kalman.py:W503
Expand Down
9 changes: 7 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ mpcobs/
OrbitFit
asteroid_study/*.png
asteroid_study/residuals/
trajectory_df.parquet
# trajectory_df.parquet

fink_fat/others/perf_test

Expand Down Expand Up @@ -182,4 +182,9 @@ merge_traj.parquet
test_merger/
track_bug/
transient_data.json
fink_fat_test/
fink_fat_test/

fink_fat_notebook/fink_fat_experiments/confirmed_all_fink_fat/
fink_fat_notebook/fink_fat_experiments/confirmed_mops_fink_fat/
fink_fat_notebook/fink_fat_experiments/data/
fink_fat_notebook/parameters_selection/res_orbit_nb_point/
36 changes: 18 additions & 18 deletions docker/centos7/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -39,37 +39,37 @@ COPY . fink-fat

# Install system build dependencies
RUN yum -y update \
&& yum -y install which git wget java-11-openjdk-devel \
&& echo "export JAVA_HOME=$(dirname $(dirname $(readlink -f $(type -P java))))" > /etc/profile.d/javahome.sh \
&& yum -y groupinstall "Development Tools" \
&& yum -y clean all \
&& rm -rf /var/cache
&& yum -y install which git wget java-11-openjdk-devel \
&& echo "export JAVA_HOME=$(dirname $(dirname $(readlink -f $(type -P java))))" > /etc/profile.d/javahome.sh \
&& yum -y groupinstall "Development Tools" \
&& yum -y clean all \
&& rm -rf /var/cache
# && echo "export JAVA_HOME=$(dirname $(dirname $(readlink -f $(type -P java))))" > /etc/profile.d/javahome.sh

# install python and the dependencies
RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-${PYTHON_VERSION}-Linux-x86_64.sh -O ~/miniconda.sh \
&& bash ~/miniconda.sh -b -p ${USRLIBS}/miniconda
&& bash ~/miniconda.sh -b -p ${USRLIBS}/miniconda

RUN pip install --no-cache-dir --upgrade pip setuptools wheel \
&& cd ${USRLIBS}/fink-fat/script \
&& source ./install_python_deps.sh \
&& cd ${USRLIBS}
&& cd ${USRLIBS}/fink-fat/script \
&& source ./install_python_deps.sh \
&& cd ${USRLIBS}


# install spark
RUN wget --quiet https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz \
&& tar -xf spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz \
&& rm spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz \
&& echo "export SPARK_HOME=$PWD/spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}" > /etc/profile.d/sparkhome.sh
&& tar -xf spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz \
&& rm spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz \
&& echo "export SPARK_HOME=$PWD/spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}" > /etc/profile.d/sparkhome.sh

# install OrbFit
RUN yum -y install epel-release \
&& yum -y install aria2 \
&& mkdir OrbFit \
&& cd ${USRLIBS}/fink-fat/script \
&& source ./orbFit_installer.sh --install_path ${USRLIBS}/OrbFit \
&& echo "export ORBFIT_HOME=${USRLIBS}/OrbFit" > /etc/profile.d/orbfithome.sh \
&& cd ${USRLIBS}
&& yum -y install aria2 \
&& mkdir OrbFit \
&& cd ${USRLIBS}/fink-fat/script \
&& source ./orbFit_installer.sh --install_path ${USRLIBS}/OrbFit \
&& echo "export ORBFIT_HOME=${USRLIBS}/OrbFit" > /etc/profile.d/orbfithome.sh \
&& cd ${USRLIBS}

ENV SPARK_HOME=$USRLIBS/spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}
ENV SPARKLIB=${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.10.9-src.zip
Expand Down
2 changes: 1 addition & 1 deletion fink_fat/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.15.2"
__version__ = "1.0.0"
109 changes: 78 additions & 31 deletions fink_fat/associations/association_kalman.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from copy import deepcopy

from fink_fat.others.utils import repeat_chunk
from fink_fat.others.utils import LoggerNewLine


def update_kalman(
Expand All @@ -19,7 +20,7 @@ def update_kalman(
tr_id: int,
) -> pd.Series:
"""
Update the kalman filter contains in the
Update the kalman filter given as input, in place.

Parameters
----------
Expand All @@ -43,29 +44,13 @@ def update_kalman(
pd.Series
same row as input but updated with the new alert
"""
Y = np.array(
[
[
ra_alert,
dec_alert,
]
]
)

dt = jd_alert - kalman_copy["jd_1"].values[0]
A = np.array(
[
[1, 0, dt, 0],
[0, 1, 0, dt],
[0, 0, 1, 0],
[0, 0, 0, 1],
]
)

kalman_copy["kalman"].values[0].update(
Y,
A,
)
if dt == 0:
# if dt is 0, the previous data point of the kalman is in the same exposure.
# it happens only for the associations between the tracklets and the kalman
# and skipping this point is not too bad for the rest of the kalman estimation.
return kalman_copy
with pd.option_context("mode.chained_assignment", None):
kalman_copy["ra_0"] = kalman_copy["ra_1"]
kalman_copy["dec_0"] = kalman_copy["dec_1"]
Expand All @@ -87,6 +72,29 @@ def update_kalman(
) / kalman_copy["dt"]
kalman_copy["trajectory_id"] = tr_id

A = np.array(
[
[1, 0, dt, 0],
[0, 1, 0, dt],
[0, 0, 1, 0],
[0, 0, 0, 1],
]
)

Y = np.array(
[
[ra_alert],
[dec_alert],
[kalman_copy["vel_ra"].values[0]],
[kalman_copy["vel_dec"].values[0]],
]
)

kalman_copy["kalman"].values[0].update(
Y,
A,
)

return kalman_copy


Expand Down Expand Up @@ -182,7 +190,11 @@ def trajectory_extension(


def tracklets_associations(
trajectory_df: pd.DataFrame, kalman_df: pd.DataFrame, new_alerts: pd.DataFrame
trajectory_df: pd.DataFrame,
kalman_df: pd.DataFrame,
new_alerts: pd.DataFrame,
logger: LoggerNewLine,
verbose: bool,
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""
Associates the intra night trajectories with the kalman trajectories.
Expand All @@ -195,6 +207,10 @@ def tracklets_associations(
dataframe containing the kalman filters information
new_alerts : pd.DataFrame
newly associated alerts from the new observing night.
logger : LoggerNewLine
logger class used to print the logs
verbose : bool
if true, print the logs

Returns
-------
Expand Down Expand Up @@ -241,12 +257,21 @@ def tracklets_associations(
new_tr_id += 1

if len(res_updated_traj) == 0:
if verbose:
logger.info(
"no associations between the intra night tracklets and the trajectories"
)
return (
pd.DataFrame(columns=trajectory_df.columns),
pd.DataFrame(columns=kalman_df.columns),
new_tr_id,
)

if verbose:
logger.info(
f"number of associations between the intra night tracklets and the trajectories: {len(res_updated_kalman)}"
)

# merge the extended trajectories
all_extended_traj = pd.concat(res_updated_traj)
all_new_kalman = pd.concat(res_updated_kalman)
Expand All @@ -259,6 +284,8 @@ def single_alerts_associations(
kalman_df: pd.DataFrame,
new_alerts: pd.DataFrame,
max_tr_id: int,
logger: LoggerNewLine,
verbose: bool,
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""
Associates the single alerts with the kalman trajectories
Expand All @@ -273,6 +300,10 @@ def single_alerts_associations(
newly associated alerts from the new observing night.
max_tr_id : int
maximum trajectory id to assign to the new kalman trajectories
logger : LoggerNewLine
logger class used to print the logs
verbose : bool
if true, print the logs

Returns
-------
Expand All @@ -290,6 +321,10 @@ def single_alerts_associations(
cluster_df = cluster_df.sort_values("estimator_id")

if len(cluster_df) == 0:
if verbose:
logger.info(
"no associations between the single alerts and the trajectories"
)
return pd.DataFrame(columns=trajectory_df.columns), pd.DataFrame(
columns=kalman_df.columns
)
Expand All @@ -310,6 +345,10 @@ def single_alerts_associations(
].values
]
)
if verbose:
logger.info(
f"number of kalman trajectories to updated with single alert: {len(new_kalman)}"
)
traj_to_update = (
trajectory_df[trajectory_df["trajectory_id"].isin(cluster_df["estimator_id"])]
.sort_values(["trajectory_id", "jd"])
Expand All @@ -324,10 +363,6 @@ def single_alerts_associations(
nb_repeat = np.repeat(traj_size.values, traj_counts_duplicates.values)
tr_id_repeat = np.repeat(cluster_df["trajectory_id"].values, nb_repeat)

traj_duplicate["trajectory_id"] = tr_id_repeat
nb_repeat = np.repeat(traj_size.values, traj_counts_duplicates.values)
tr_id_repeat = np.repeat(cluster_df["trajectory_id"].values, nb_repeat)

traj_duplicate["trajectory_id"] = tr_id_repeat
new_traj = pd.concat([traj_duplicate, cluster_df.drop("estimator_id", axis=1)])
return new_traj, new_kalman
Expand All @@ -337,6 +372,8 @@ def kalman_association(
trajectory_df: pd.DataFrame,
kalman_df: pd.DataFrame,
new_alerts: pd.DataFrame,
logger: LoggerNewLine,
verbose: bool,
confirmed_sso: bool = False,
) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""
Expand All @@ -352,6 +389,10 @@ def kalman_association(
new_alerts : pd.DataFrame
dataframe containing the alerts from the current night to associate with the kalman filters,
the dataframe must contain the trajectory_id corresponding to the seeds found by the seeding.
logger : LoggerNewLine
logger class used to print the logs
verbose : bool
if true, print the logs
confirmed_sso : boolean
if true, used the confirmed sso (for test purpose)

Expand All @@ -370,7 +411,12 @@ def kalman_association(
roid_flag = [3, 4]
else:
roid_flag = [1, 2, 4]
new_alerts = new_alerts[new_alerts["roid"].isin(roid_flag)]

if verbose:
logger.info("- start the association with the kalman filters")
new_alerts = new_alerts[new_alerts["roid"].isin(roid_flag)].explode(
["estimator_id", "ffdistnr"]
)
new_alerts = new_alerts.explode(["ffdistnr", "estimator_id"])
new_alerts["estimator_id"] = new_alerts["estimator_id"].fillna(-1).astype(int)
traj_id_to_update = np.sort(new_alerts["estimator_id"].unique())
Expand All @@ -382,10 +428,10 @@ def kalman_association(
non_kalman_update = kalman_df[~mask_kalman_update]

res_tr, res_kalman, max_tr_id = tracklets_associations(
trajectory_df, kalman_df, new_alerts
trajectory_df, kalman_df, new_alerts, logger, verbose
)
new_traj, new_kalman = single_alerts_associations(
trajectory_df, kalman_df, new_alerts, max_tr_id
trajectory_df, kalman_df, new_alerts, max_tr_id, logger, verbose
)

new_traj = pd.concat([res_tr, new_traj])
Expand All @@ -398,7 +444,8 @@ def kalman_association(

traj_results = pd.concat([non_tr_update_df, new_traj])
kalman_results = pd.concat([non_kalman_update, new_kalman])

if verbose:
logger.newline()
return traj_results, kalman_results


Expand Down
7 changes: 6 additions & 1 deletion fink_fat/associations/association_orbit.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,13 +63,16 @@ def orbit_associations(
Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]
the new orbits, the updated trajectories and the old orbits
"""
if verbose:
logger.info("start the associations with the orbits")
orbit_cols_to_keep = list(orbits.columns)
traj_cols_to_keep = list(trajectory_df.columns)
if "ffdistnr" not in traj_cols_to_keep:
traj_cols_to_keep.append("ffdistnr")

orbit_alert_assoc = (
new_alerts[new_alerts["roid"] == 5]
.explode(["estimator_id", "ffdistnr"])
.rename({"estimator_id": "ssoCandId"}, axis=1)
.drop("roid", axis=1)
)
Expand All @@ -87,10 +90,11 @@ def orbit_associations(
logger.info(f"number of orbits to update: {len(updated_sso_id)}")

duplicated_id = orbit_alert_assoc[
orbit_alert_assoc[["ssoCandId", "jd"]].duplicated()
orbit_alert_assoc[["ssoCandId", "candid"]].duplicated()
]["ssoCandId"]
assert len(duplicated_id) == 0

# recompute the orbit using local or cluster mode
new_orbit_pdf = switch_local_cluster(config, traj_to_new_orbit)

# remove the failed orbits
Expand All @@ -99,6 +103,7 @@ def orbit_associations(
logger.info(
f"number of successfull updated orbits: {len(new_orbit_pdf)} ({(len(new_orbit_pdf) / len(updated_sso_id)) * 100} %)"
)
logger.newline(2)
new_orbit_pdf["ssoCandId"] = new_orbit_pdf["trajectory_id"].map(trid_to_ssoid)
updated_id = new_orbit_pdf["ssoCandId"]

Expand Down
Loading
Loading