name: Sentinel

on:
  # Trigger the workflow on push to the main branch, on any pull request,
  # and on a twice-weekly schedule
  push:
    branches:
      - main
  pull_request:
  schedule:
    - cron: '0 6 * * 1,4'
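    # i.e. at 06:00 UTC every Monday and Thursday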

env:
  NIGHT: 20190903
  CONFIGFILE_PATH: fink_mm/conf/integration.conf
  SECRET_ID: ${{ secrets.GCN_ID }}
  SECRET_SECRET: ${{ secrets.GCN_SECRET }}
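# NIGHT above is the fixed test night used by the unit-test job; the
# integration job overrides it with the current date. SECRET_ID and
# SECRET_SECRET inject the GCN client credentials from the repository secrets.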

jobs:
  run-unit-test:
    name: run-unit-test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        container: ["julienpeloton/fink-ci:prod"]
    container:
      image: ${{ matrix.container }}
    steps:
      - uses: actions/checkout@v2
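      # The fink-ci image is assumed to ship Spark, Kafka and HBase, together
      # with the SPARK_HOME, SPARKLIB, USRLIBS, KAFKA_VERSION and HBASE_VERSION
      # environment variables used by the steps below.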
      - name: Set up env [1/3]
        run: |
          echo "ROOTPATH=$(pwd)" >> $GITHUB_ENV
          echo "FINK_MM=$GITHUB_WORKSPACE" >> $GITHUB_ENV
          echo "JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))" >> $GITHUB_ENV
      - name: Set up env [2/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${SPARKLIB}:${FINK_MM}" >> $GITHUB_ENV
      - name: Install fink-broker
        run: |
          cd ${HOME}
          git clone https://github.com/astrolabsoftware/fink-broker.git
          cd fink-broker
          pip install .
          echo "FINK_HOME=${HOME}/fink-broker" >> $GITHUB_ENV
          cd $FINK_MM
      - name: Set up env [3/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${FINK_HOME}" >> $GITHUB_ENV
          echo "${FINK_HOME}/bin" >> $GITHUB_PATH
          echo "FINK_PACKAGES=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7" >> $GITHUB_ENV
          echo "FINK_JARS=${FINK_HOME}/libs/hbase-spark-hbase2.2_spark3_scala2.11_hadoop2.7.jar,${FINK_HOME}/libs/hbase-spark-protocol-shaded-hbase2.2_spark3_scala2.11_hadoop2.7.jar" >> $GITHUB_ENV
      - name: Install requirements
        run: |
          pip install -r requirements.txt
      - name: Check env
        run: |
          echo "GITHUB_PATH: $GITHUB_PATH"
          echo "PATH: $PATH"
          echo "FINK_MM: $FINK_MM"
          echo "SPARK_HOME: $SPARK_HOME"
          echo "SPARKLIB: $SPARKLIB"
          echo "PYTHONPATH: $PYTHONPATH"
          echo "JAVA_HOME: $JAVA_HOME"
          echo "FINK_HOME: $FINK_HOME"
          echo "FINK_PACKAGES: $FINK_PACKAGES"
          echo "FINK_JARS: $FINK_JARS"
          python -V
          echo
          echo
          export
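      # start_services.sh ships with the container under $USRLIBS and is
      # expected to boot the local Kafka and HBase instances used below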
      - name: Start services
        run: |
          cd $USRLIBS
          source scripts/start_services.sh --kafka-version ${KAFKA_VERSION} --hbase-version ${HBASE_VERSION}
          cd $FINK_MM
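      # raw2science applies the science pipeline to the raw ZTF alerts of the
      # test night, stopping on its own after 90 seconds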
      - name: Call raw2science
        run: |
          fink start raw2science -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --exit_after 90
      - name: Merge data
        run: |
          fink start merge -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT
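      # index_archival is expected to push the merged night into the HBase
      # index table named by --index_table (jd_objectId)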
      - name: Push Hbase data
        run: |
          fink start index_archival -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --index_table jd_objectId
      - name: Check HBase table
        run: |
          echo 'list' | /home/libs/hbase-2.2.7/bin/hbase shell
      - name: Test preamble
        run: |
          echo "PYTHONPATH=${SPARK_HOME}/python/test_coverage:$PYTHONPATH" >> $GITHUB_ENV
          echo "COVERAGE_PROCESS_START=${ROOTPATH}/.coveragerc" >> $GITHUB_ENV
          python -m pip install . --upgrade
          pip install -U fink_filters
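          # COVERAGE_PROCESS_START points coverage.py at the rc file so that
          # subprocesses are measured too; the Spark test_coverage directory
          # put on PYTHONPATH above presumably provides the startup hook that
          # reads it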
      - name: Run test suites
        run: |
          coverage run --source=${ROOTPATH} --rcfile=${ROOTPATH}/.coveragerc -m pytest --doctest-modules fink_mm
          curl -s https://codecov.io/bash | bash

  run-integration-test:
    name: run-integration-test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        container: ["julienpeloton/fink-ci:prod"]
    container:
      image: ${{ matrix.container }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up env [1/3]
        run: |
          echo "ROOTPATH=$(pwd)" >> $GITHUB_ENV
          echo "FINK_MM=$GITHUB_WORKSPACE" >> $GITHUB_ENV
          echo "JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))" >> $GITHUB_ENV
      - name: Set up env [2/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${SPARKLIB}:${FINK_MM}" >> $GITHUB_ENV
          echo "NIGHT=$(date +"%Y%m%d" -d "now")" >> $GITHUB_ENV
      - name: Install fink-broker
        run: |
          cd ${HOME}
          git clone https://github.com/astrolabsoftware/fink-broker.git
          cd fink-broker
          pip install .
          echo "FINK_HOME=${HOME}/fink-broker" >> $GITHUB_ENV
          cd $FINK_MM
      - name: Set up env [3/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${FINK_HOME}" >> $GITHUB_ENV
          echo "${FINK_HOME}/bin" >> $GITHUB_PATH
          echo "FINK_PACKAGES=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7" >> $GITHUB_ENV
          echo "FINK_JARS=${FINK_HOME}/libs/hbase-spark-hbase2.2_spark3_scala2.11_hadoop2.7.jar,${FINK_HOME}/libs/hbase-spark-protocol-shaded-hbase2.2_spark3_scala2.11_hadoop2.7.jar" >> $GITHUB_ENV
      - name: Install requirements
        run: |
          pip install -r requirements.txt
      - name: Test preamble
        run: |
          echo "PYTHONPATH=${SPARK_HOME}/python/test_coverage:$PYTHONPATH" >> $GITHUB_ENV
          echo "COVERAGE_PROCESS_START=${ROOTPATH}/.coveragerc" >> $GITHUB_ENV
          python -m pip install . --upgrade
          python -m pip install -U fink_filters
          mkdir fink_mm/ci_gcn_test
          mkdir fink_mm/ci_join_test
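          # ci_gcn_test and ci_join_test receive the GCN alerts and the join
          # products inspected by the Check steps below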
      - name: Launch GCN stream
        # don't pass --test to the gcn_stream command, otherwise it will miss the GW alerts
        shell: bash
        run: |
          fink_mm gcn_stream start --config $CONFIGFILE_PATH --verbose > fink_mm_gcnstream_${NIGHT}.log 2>&1 &
          sleep 180s
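      # the stream keeps running in the background; the sleep gives it time to
      # connect and start collecting alerts before the rest of the job runs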
      - name: Start services
        run: |
          cd $USRLIBS
          source scripts/start_services.sh --kafka-version ${KAFKA_VERSION} --hbase-version ${HBASE_VERSION}
          cd $FINK_MM
      - name: Generate fake gcn counterparts
        run: |
          python fink_mm/test/prep_ztf_data.py
          mv fink_mm/test/test_data/ztf_test/online/raw/year=2019 fink_mm
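      # the fake ZTF counterparts are generated for the 2019 test night; moving
      # the year=2019 raw partition under fink_mm presumably puts it where the
      # join jobs expect to find it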
      - name: Check data
        run: |
          echo "GCN ALERTS"
          ls -dlth fink_mm/ci_gcn_test/*/*/*/*
          echo ""
          echo "ZTF ALERTS"
          echo "--- ONLINE SCIENCE"
          ls -dlth fink_mm/test/test_data/ztf_test/online/science/*/*/*/*
          echo "--- ARCHIVE SCIENCE"
          ls -dlth fink_mm/test/test_data/ztf_test/archive/science/*/*/*/*
      # - name: Call raw2science
      #   run: |
      #     fink start raw2science -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --exit_after 90
      # - name: Merge data
      #   run: |
      #     fink start merge -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT
      - name: Push Hbase data
        run: |
          fink start index_archival -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --index_table jd_objectId
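      # join_stream cross-matches the ZTF alerts with the GCN events: 'online'
      # follows the live night, 'offline' replays the archived data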
      - name: Run online join streaming
        run: |
          fink_mm join_stream online --config $CONFIGFILE_PATH --night $NIGHT --exit_after 180 --verbose
      - name: Run offline stream
        run: |
          fink_mm join_stream offline --night $NIGHT --config $CONFIGFILE_PATH --test --verbose
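      # distribute is expected to push the matched alerts to the output Kafka
      # topics, exiting on its own after 60 seconds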
      - name: Online Distribution
        run: |
          fink_mm distribute --config $CONFIGFILE_PATH --night $NIGHT --exit_after 60 --verbose
      - name: Check online stream
        run: |
          ls -lth fink_mm/ci_join_test/online/*/*/*
      - name: Check offline stream
        run: |
          ls -lth fink_mm/ci_join_test/offline/*/*/*
      # - name: Check Distribution
      #   run: |
      #     python fink_mm/test/display_distribution.py