# run_test.yml
name: Sentinel
on:
  # Trigger the workflow on push or pull request,
  # but only for the main branch
  push:
    branches:
      - main
  pull_request:
  schedule:
    - cron: '0 6 * * 1,4'
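  # The cron expression '0 6 * * 1,4' fires at 06:00 UTC every Monday and
  # Thursday, so the test suites also run twice a week independently of
  # repository activity.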
env:
  NIGHT: 20190903
  CONFIGFILE_PATH: fink_mm/conf/integration.conf
  SECRET_ID: ${{ secrets.GCN_ID }}
  SECRET_SECRET: ${{ secrets.GCN_SECRET }}
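# SECRET_ID and SECRET_SECRET expose the GCN_ID/GCN_SECRET repository secrets
# to the jobs below; presumably these are the GCN client credentials consumed
# through CONFIGFILE_PATH (an assumption based on the variable names, not
# something stated in this file).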
jobs:
  run-unit-test:
    name: run-unit-test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        container: ["julienpeloton/fink-ci:dev"]
    container:
      image: ${{ matrix.container }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up env [1/3]
        run: |
          echo "ROOTPATH=`pwd`" >> $GITHUB_ENV
          echo "FINK_MM=$GITHUB_WORKSPACE" >> $GITHUB_ENV
          echo "JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))" >> $GITHUB_ENV
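      # Appending KEY=VALUE lines to the file behind $GITHUB_ENV is how GitHub
      # Actions persists environment variables across steps, e.g.:
      #   echo "MY_VAR=some_value" >> $GITHUB_ENV   # $MY_VAR is set in later steps
      # A plain `export` would only last for the current step's shell.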
      - name: Set up env [2/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${SPARKLIB}:${FINK_MM}" >> $GITHUB_ENV
      - name: Install fink-broker
        run: |
          cd ${HOME}
          git clone https://github.com/astrolabsoftware/fink-broker.git
          cd fink-broker
          pip install .
          echo "FINK_HOME=${HOME}/fink-broker" >> $GITHUB_ENV
          cd $FINK_MM
      - name: Set up env [3/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${FINK_HOME}" >> $GITHUB_ENV
          echo "${FINK_HOME}/bin" >> $GITHUB_PATH
          echo "FINK_PACKAGES=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7" >> $GITHUB_ENV
          echo "FINK_JARS=${FINK_HOME}/libs/hbase-spark-hbase2.2_spark3_scala2.11_hadoop2.7.jar,${FINK_HOME}/libs/hbase-spark-protocol-shaded-hbase2.2_spark3_scala2.11_hadoop2.7.jar" >> $GITHUB_ENV
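      # FINK_PACKAGES lists Maven coordinates and FINK_JARS local jar paths;
      # presumably the fink/fink_mm launchers forward them to spark-submit as
      # --packages and --jars (an assumption: that wiring is not shown in this
      # workflow).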
      - name: Install requirements
        run: |
          pip install -r requirements.txt
      - name: Check env
        run: |
          echo "GITHUB_PATH: $GITHUB_PATH"
          echo "PATH: $PATH"
          echo "FINK_MM: $FINK_MM"
          echo "SPARK_HOME: $SPARK_HOME"
          echo "SPARKLIB: $SPARKLIB"
          echo "PYTHONPATH: $PYTHONPATH"
          echo "JAVA_HOME: $JAVA_HOME"
          echo "FINK_HOME: $FINK_HOME"
          echo "FINK_PACKAGES: $FINK_PACKAGES"
          echo "FINK_JARS: $FINK_JARS"
          echo `python -V`
          echo
          echo
          export
      - name: Test preamble
        run: |
          echo "PYTHONPATH=${SPARK_HOME}/python/test_coverage:$PYTHONPATH" >> $GITHUB_ENV
          echo "COVERAGE_PROCESS_START=${ROOTPATH}/.coveragerc" >> $GITHUB_ENV
          python -m pip install . --upgrade
          pip install -U fink_filters
      - name: Run test suites
        run: |
          coverage run --source=${ROOTPATH} --rcfile=${ROOTPATH}/.coveragerc -m pytest --doctest-modules fink_mm
          curl -s https://codecov.io/bash | bash
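      # `curl -s https://codecov.io/bash | bash` uploads the coverage report
      # with Codecov's legacy bash uploader; note that Codecov has since
      # deprecated it in favour of the codecov/codecov-action step.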
  run-integration-test:
    name: run-integration-test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        container: ["julienpeloton/fink-ci:prod"]
    container:
      image: ${{ matrix.container }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up env [1/3]
        run: |
          echo "ROOTPATH=`pwd`" >> $GITHUB_ENV
          echo "FINK_MM=$GITHUB_WORKSPACE" >> $GITHUB_ENV
          echo "JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))" >> $GITHUB_ENV
      - name: Set up env [2/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${SPARKLIB}:${FINK_MM}" >> $GITHUB_ENV
          echo "NIGHT=`date +"%Y%m%d" -d "now"`" >> $GITHUB_ENV
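      # The NIGHT export above redefines NIGHT as today's date in YYYYMMDD form
      # for the integration run, superseding the fixed NIGHT=20190903 from the
      # workflow-level env block that the unit-test job relies on.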
      - name: Install fink-broker
        run: |
          cd ${HOME}
          git clone https://github.com/astrolabsoftware/fink-broker.git
          cd fink-broker
          pip install .
          echo "FINK_HOME=${HOME}/fink-broker" >> $GITHUB_ENV
          cd $FINK_MM
      - name: Set up env [3/3]
        run: |
          echo "PYTHONPATH=${PYTHONPATH}:${FINK_HOME}" >> $GITHUB_ENV
          echo "${FINK_HOME}/bin" >> $GITHUB_PATH
          echo "FINK_PACKAGES=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7" >> $GITHUB_ENV
          echo "FINK_JARS=${FINK_HOME}/libs/hbase-spark-hbase2.2_spark3_scala2.11_hadoop2.7.jar,${FINK_HOME}/libs/hbase-spark-protocol-shaded-hbase2.2_spark3_scala2.11_hadoop2.7.jar" >> $GITHUB_ENV
      - name: Install requirements
        run: |
          pip install -r requirements.txt
      - name: Test preamble
        run: |
          echo "PYTHONPATH=${SPARK_HOME}/python/test_coverage:$PYTHONPATH" >> $GITHUB_ENV
          echo "COVERAGE_PROCESS_START=${ROOTPATH}/.coveragerc" >> $GITHUB_ENV
          python -m pip install . --upgrade
          python -m pip install -U fink_filters
          mkdir fink_mm/ci_gcn_test
          mkdir fink_mm/ci_join_test
      - name: Launch GCN stream
        # do not pass --test to the gcn_stream command, otherwise it will miss the GW alerts
        shell: bash
        run: |
          fink_mm gcn_stream start --config $CONFIGFILE_PATH --test --verbose > fink_mm_gcnstream_${NIGHT}.log 2>&1 &
          sleep 180s
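      # The trailing `&` detaches the stream listener so the job can continue;
      # its output is redirected to fink_mm_gcnstream_${NIGHT}.log, and the
      # 180 s sleep presumably gives it time to subscribe and collect a first
      # batch of GCN alerts before the downstream steps run.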
      - name: Start services
        run: |
          cd $USRLIBS
          source scripts/start_services.sh --kafka-version ${KAFKA_VERSION} --hbase-version ${HBASE_VERSION}
          cd $FINK_MM
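      # $USRLIBS, $KAFKA_VERSION and $HBASE_VERSION are not defined in this
      # file; presumably they are baked into the julienpeloton/fink-ci image,
      # whose scripts/start_services.sh spins up local Kafka and HBase
      # instances (an assumption based on the flags above).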
      - name: Generate fake GCN counterparts
        run: |
          python fink_mm/test/prep_ztf_data.py
          mv fink_mm/test/test_data/ztf_test/online/raw/year=2019 fink_mm
      - name: Check data
        run: |
          echo "GCN ALERTS"
          ls -dlth fink_mm/ci_gcn_test/*/*/*/*
          echo ""
          echo "ZTF ALERTS"
          echo "--- ONLINE SCIENCE"
          ls -dlth fink_mm/test/test_data/ztf_test/online/science/*/*/*/*
          echo "--- ARCHIVE SCIENCE"
          ls -dlth fink_mm/test/test_data/ztf_test/archive/science/*/*/*/*
      # - name: Call raw2science
      #   run: |
      #     fink start raw2science -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --exit_after 90
      # - name: Merge data
      #   run: |
      #     fink start merge -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT
      - name: Push HBase data
        run: |
          fink start index_archival -c ${FINK_MM}/fink_mm/test/test_data/with_hbase/fink_test.conf --night $NIGHT --index_table jd_objectId
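      # index_archival loads the night's alerts into HBase under the
      # jd_objectId index table; presumably the offline join below queries this
      # table, which is why the HBase service must be running first (an
      # assumption: the fink_mm internals are not shown here).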
      - name: Run online join streaming
        run: |
          fink_mm join_stream online --config $CONFIGFILE_PATH --night $NIGHT --exit_after 180 --verbose
      - name: Run offline stream
        run: |
          fink_mm join_stream offline --night $NIGHT --config $CONFIGFILE_PATH --test --verbose
      - name: Online Distribution
        run: |
          fink_mm distribute --config $CONFIGFILE_PATH --night $NIGHT --exit_after 60 --verbose
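      # --exit_after bounds each streaming command (180 s for the online join,
      # 60 s for distribution) so the CI job terminates instead of streaming
      # indefinitely; the offline mode takes no --exit_after flag, presumably
      # because it runs as a finite batch job.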
      - name: Check online stream
        run: |
          ls -lth fink_mm/ci_join_test/online/*/*/*
      - name: Check offline stream
        run: |
          ls -lth fink_mm/ci_join_test/offline/*/*/*
      # - name: Check Distribution
      #   run: |
      #     python fink_mm/test/display_distribution.py