Commit

added entrypoint
davedavemckay committed Jun 25, 2024
1 parent 1a21d58 commit 642711b
Showing 2 changed files with 19 additions and 15 deletions.
6 changes: 5 additions & 1 deletion echo-side/containers/basic_env/Dockerfile
@@ -8,8 +8,12 @@ RUN conda install -y -c conda-forge mamba && mamba init

COPY environment.yaml /environment.yaml

-RUN mamba env update --file /environment.yaml
+RUN mamba env create --name lsst-uk --file=/environment.yaml

RUN git clone https://github.com/lsst-uk/csd3-echo-somerville.git

RUN cd csd3-echo-somerville && python -m pip install .

+COPY entrypoint.sh /entrypoint.sh
+
+ENTRYPOINT ["/entrypoint.sh"]
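
Note: the entrypoint.sh script itself is not among the two files changed in this commit, so its contents are not shown in this diff. As a minimal hypothetical sketch (not the repository's actual script), an entrypoint that activates the lsst-uk environment created above and then hands control to the container command could look like the following, assuming conda lives under /opt/conda in the base image:

#!/bin/bash
# Hypothetical entrypoint sketch (not taken from this commit).
# Activate the lsst-uk conda environment created in the Dockerfile,
# then exec whatever command was passed to the container.
set -e
source /opt/conda/etc/profile.d/conda.sh
conda activate lsst-uk
exec "$@"
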
28 changes: 14 additions & 14 deletions echo-side/dags/monitor.py
@@ -17,7 +17,7 @@

def run_on_new_file(**kwargs):
s3_hook = S3Hook(aws_conn_id='EchoS3')
-bucket_name='LSST-IR-FUSION-TESTCOLLATE',
+bucket_name='LSST-IR-FUSION-TESTSTRATEGY',
bucket_key='/',
wildcard_match_suffix='.csv',
all_keys = s3_hook.list_keys(bucket_name=bucket_name, prefix=bucket_key, delimiter='/', suffix=wildcard_match_suffix, apply_wildcard=True),
@@ -34,15 +34,15 @@ def run_on_new_file(**kwargs):
}

dag = DAG(
-'monitor-LSST-IR-FUSION-TESTCOLLATE',
+'monitor-LSST-IR-FUSION-TESTSTRATEGY',
default_args=default_args,
-description='Monitor LSST-IR-FUSION-TESTCOLLATE S3 bucket for new CSV-formatted upload log files.',
+description='Monitor LSST-IR-FUSION-TESTSTRATEGY S3 bucket for new CSV-formatted upload log files.',
schedule=timedelta(days=1),
)

s3_sensor = S3KeySensor(
task_id='s3_sensor',
-bucket_name='LSST-IR-FUSION-TESTCOLLATE',
+bucket_name='LSST-IR-FUSION-TESTSTRATEGY',
bucket_key='*.csv',
wildcard_match=True,
aws_conn_id='EchoS3',
@@ -60,16 +60,16 @@ def run_on_new_file(**kwargs):
op_kwargs={'ds': '{{ ds }}'},
)

-# check_csv = KubernetesPodOperator(
-# task_id="check_key",
-# name="check-key",
-# namespace="airflow",
-# image="localhost:32000/check-csv:latest",
-# cmds=["python", "-c"],
-# arguments=[new_keys],#,connection.get_credentials()access_key,connection.secret_key],
-# get_logs=True,
-# dag=dag,
-# )
+check_csv = KubernetesPodOperator(
+task_id="check_key",
+name="check-key",
+namespace="airflow",
+image="ghcr.io/lsst-uk/csd3-echo-somerville:latest",
+cmds=["python", "-c"],
+arguments=[new_keys],#,connection.get_credentials()access_key,connection.secret_key],
+get_logs=True,
+dag=dag,
+)

#graph
s3_sensor >> run_on_new_file_op
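
As a hedged usage note: once this DAG file is deployed, the rename can be smoke-tested from an Airflow environment that has the EchoS3 connection configured, for example with the standard Airflow CLI (the date below is illustrative):

# Confirm monitor.py imports without errors, then run the renamed DAG once locally.
airflow dags list-import-errors
airflow dags test monitor-LSST-IR-FUSION-TESTSTRATEGY 2024-06-25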