Commit 5cedfac: Update paths

JulienPeloton committed Jan 18, 2024
1 parent 82da46b
Showing 4 changed files with 8 additions and 8 deletions.
fink_broker/hbaseUtils.py (2 changes: 1 addition & 1 deletion)

@@ -883,7 +883,7 @@ def cast_features(df):
         root, "fink-alert-schemas/ztf/template_schema_ZTF_3p3.avro")
 
     globs["ztf_alert_sample_scidatabase"] = os.path.join(
-        root, "online/science")
+        root, "online/science/20200101")
 
     # Run the Spark test suite
     spark_unit_tests(globs, withstreaming=False)
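For context, a minimal sketch of how such a dated sample path resolves, assuming the standard fink-broker layout where online data is partitioned by observing night as online/<stage>/<YYYYMMDD> (the night value "20200101" comes from the diff; everything else is illustrative):

import os

# Hypothetical illustration: build the dated sample path used by the doctests.
root = os.environ["FINK_HOME"]
night = "20200101"
sample_path = os.path.join(root, "online", "science", night)
print(sample_path)  # e.g. /path/to/fink-broker/online/science/20200101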
fink_broker/partitioning.py (8 changes: 4 additions & 4 deletions)

@@ -1,4 +1,4 @@
-# Copyright 2020-2022 AstroLab Software
+# Copyright 2020-2024 AstroLab Software
 # Author: Julien Peloton
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -43,7 +43,7 @@ def convert_to_millitime(jd: pd.Series, format=None, now=None):
     Examples
     ----------
     >>> from fink_broker.sparkUtils import load_parquet_files
-    >>> df = load_parquet_files("online/raw")
+    >>> df = load_parquet_files("online/raw/20200101")
     >>> df = df.withColumn('millis', convert_to_millitime(df['candidate.jd']))
     >>> pdf = df.select('millis').toPandas()
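As a rough sketch of the conversion itself (standard Julian-date arithmetic, not necessarily the broker's exact implementation: the Unix epoch 1970-01-01T00:00:00 UTC corresponds to JD 2440587.5):

import pandas as pd

def jd_to_millis(jd: pd.Series) -> pd.Series:
    """Convert Julian dates to Unix milliseconds (illustrative only)."""
    # Days since the Unix epoch, times 86400 s/day, times 1000 ms/s.
    return ((jd - 2440587.5) * 86400.0 * 1000.0).astype("int64")

print(jd_to_millis(pd.Series([2458849.5])))  # 1577836800000 -> 2020-01-01T00:00:00Z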
@@ -100,7 +100,7 @@ def convert_to_datetime(jd: pd.Series, format=None) -> pd.Series:
     Examples
     ----------
     >>> from fink_broker.sparkUtils import load_parquet_files
-    >>> df = load_parquet_files("online/raw")
+    >>> df = load_parquet_files("online/raw/20200101")
     >>> df = df.withColumn('datetime', convert_to_datetime(df['candidate.jd']))
     >>> pdf = df.select('datetime').toPandas()
     """
@@ -131,7 +131,7 @@ def numPart(df, partition_size=128.):
     Examples
     ----------
     >>> from fink_broker.sparkUtils import load_parquet_files
-    >>> df = load_parquet_files("online/raw")
+    >>> df = load_parquet_files("online/raw/20200101")
     >>> numpart = numPart(df, partition_size=128.)
     >>> print(numpart)
     1
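Judging by the signature and the doctest result, numPart presumably derives a partition count from the DataFrame's estimated size and a target partition size in MB. A hedged sketch of that idea (the size estimate via the Catalyst optimizer statistics is an assumption, using the Spark 3.x API):

import numpy as np

def num_part_sketch(df, partition_size=128.0):
    """Estimate a partition count from plan size statistics (illustrative only)."""
    # Catalyst exposes an estimated plan size in bytes (Spark 3.x API).
    size_bytes = df._jdf.queryExecution().optimizedPlan().stats().sizeInBytes()
    size_mb = size_bytes / 1024.0 / 1024.0
    # At least one partition, even for tiny inputs.
    return max(int(np.ceil(size_mb / partition_size)), 1)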
fink_broker/science.py (2 changes: 1 addition & 1 deletion)

@@ -528,7 +528,7 @@ def apply_science_modules_elasticc(df: DataFrame) -> DataFrame:
     globs = globals()
     root = os.environ['FINK_HOME']
     globs["ztf_alert_sample"] = os.path.join(
-        root, "online/raw")
+        root, "online/raw/20200101")
 
     globs['elasticc_alert_sample'] = os.path.join(
         root, "datasim/elasticc_alerts")
fink_broker/sparkUtils.py (4 changes: 2 additions & 2 deletions)

@@ -298,7 +298,7 @@ def connect_to_raw_database(basepath: str, path: str, latestfirst: bool) -> DataFrame:
     Examples
     ----------
     >>> dfstream_tmp = connect_to_raw_database(
-    ...     "online/raw", "online/raw/*", True)
+    ...     "online/raw/20200101", "online/raw/20200101", True)
     >>> dfstream_tmp.isStreaming
     True
     """
@@ -485,7 +485,7 @@ def list_hdfs_files(hdfs_path='archive/science/year=2023/month=06/day=25'):
     globs = globals()
     root = os.environ['FINK_HOME']
     globs["ztf_alert_sample"] = os.path.join(
-        root, "online/raw")
+        root, "online/raw/20200101")
 
     globs["ztf_avro_sample"] = os.path.join(
         root, "fink-alert-schemas/ztf/template_schema_ZTF_3p3.avro")
