Merge pull request #55 from FusRoman/issue/54/healpix_query_disc
fix healpix bug and add more tests
FusRoman authored Jan 11, 2023
2 parents cee03df + 55dda75 commit 52d76c5
Showing 14 changed files with 412 additions and 99 deletions.
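The healpix fix named in the branch is in files not shown on this page; as a purely illustrative sketch of the kind of call the branch name refers to (not the PR's actual code; the resolution, sky position, and radius below are assumed values), a typical healpy disc query looks like this:

    # Illustrative healpy disc query; all numeric values are assumptions.
    import healpy as hp
    import numpy as np

    nside = 128                       # assumed map resolution
    ra, dec = 210.5, -12.3            # assumed disc centre, in degrees
    radius_deg = 1.5                  # assumed search radius, in degrees

    # query_disc expects a unit vector for the centre and the radius in radians;
    # forgetting the degree-to-radian conversion is a common source of bugs.
    center = hp.ang2vec(ra, dec, lonlat=True)
    pixels = hp.query_disc(nside, center, np.radians(radius_deg), inclusive=True)

    print(f"{len(pixels)} HEALPix pixels overlap the disc")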
41 changes: 31 additions & 10 deletions .github/workflows/run_test.yml
@@ -15,48 +15,69 @@ jobs:

strategy:
matrix:
container: ["fusroman/fink_fat_docker:dev"]
container: ["julienpeloton/fink-ci:prod"]

container:
image: ${{ matrix.container }}

steps:
- uses: actions/checkout@v2
- name: Set up env [1/2]

- name: Set up env [1/3]
run: |
echo "FINK_GRB=$GITHUB_WORKSPACE" >> $GITHUB_ENV
echo "JAVA_HOME=$(dirname $(dirname $(readlink -f $(which java))))" >> $GITHUB_ENV
echo "${BINPATH}" >> $GITHUB_PATH
# echo "${BINPATH}" >> $GITHUB_PATH

- name: Set up env [2/2]
- name: Set up env [2/3]
run: |
echo "PYTHONPATH="${PYTHONPATH}:${SPARKLIB}:${FINK_GRB}"" >> $GITHUB_ENV
- name: Install fink-broker
run: |
cd ${HOME}
git clone https://github.com/astrolabsoftware/fink-broker.git
cd fink-broker
pip install .
cd ../
export FINK_HOME=$HOME/fink-broker
export PATH=$PATH:$FINK_HOME/bin
export PYTHONPATH=$PYTHONPATH:$FINK_HOME
echo "FINK_HOME=${HOME}/fink-broker" >> $GITHUB_ENV
cd $FINK_GRB
- name: Set up env [3/3]
run: |
echo "PYTHONPATH=${PYTHONPATH}:${FINK_HOME}" >> $GITHUB_ENV
echo "${FINK_HOME}/bin" >> $GITHUB_PATH
echo "FINK_PACKAGES=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7" >> $GITHUB_ENV
echo "FINK_JARS=${FINK_HOME}/libs/fink-broker_2.11-1.2.jar,${FINK_HOME}/libs/hbase-spark-hbase2.2_spark3_scala2.11_hadoop2.7.jar,${FINK_HOME}/libs/hbase-spark-protocol-shaded-hbase2.2_spark3_scala2.11_hadoop2.7.jar" >> $GITHUB_ENV
- name: Install requirements
run: |
pip install -r requirements.txt
- name: Check env
run: |
echo "GITHUB_PATH: $GITHUB_PATH"
echo "PATH: $PATH"
echo "FINK_GRB: $FINK_GRB"
echo "SPARK_HOME: $SPARK_HOME"
echo "SPARKLIB: $SPARKLIB"
echo "PYTHONPATH: $PYTHONPATH"
echo "JAVA_HOME: $JAVA_HOME"
echo "FINK_HOME: $FINK_HOME"
echo "FINK_PACKAGES: $FINK_PACKAGES"
echo "FINK_JARS: $FINK_JARS"
echo `python -V`
- name: Push Hbase data
run: |
cd $USRLIBS
source scripts/start_services.sh --kafka-version ${KAFKA_VERSION} --hbase-version ${HBASE_VERSION}
cd $FINK_GRB
fink start index_archival -c ${FINK_GRB}/fink_grb/test/test_data/with_hbase/fink.conf.prod --night 20190903 --index_table jd_objectId
- name: Check HBase table
run: |
echo 'list' | /home/libs/hbase-2.2.7/bin/hbase shell
- name: Run test suites
run: |
./run_test.sh
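For context, a minimal sketch (not the project's actual entry point) of how the FINK_PACKAGES and FINK_JARS values exported above are typically consumed when a Spark session is built; the application name is an assumption:

    import os
    from pyspark.sql import SparkSession

    # spark.jars.packages and spark.jars are standard Spark config keys;
    # here they are filled from the CI environment variables set above.
    spark = (
        SparkSession.builder
        .appName("fink_grb_ci_check")  # hypothetical application name
        .config("spark.jars.packages", os.environ.get("FINK_PACKAGES", ""))
        .config("spark.jars", os.environ.get("FINK_JARS", ""))
        .getOrCreate()
    )
    print(spark.version)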
2 changes: 1 addition & 1 deletion fink_grb/conf/fink_grb.conf
@@ -40,7 +40,7 @@ max_core=16
executor_core=8
external_python_libs=
jars=
packages=
packages=org.apache.spark:spark-streaming-kafka-0-10-assembly_2.12:3.1.3,org.apache.spark:spark-sql-kafka-0-10_2.12:3.1.3,org.apache.spark:spark-avro_2.12:3.1.3,org.apache.hbase:hbase-shaded-mapreduce:2.2.7

[ADMIN]
verbose=False
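The new packages value mirrors the FINK_PACKAGES list set in the CI workflow above. A hedged sketch, not fink_grb's actual launcher, of how such an INI entry can be read with configparser and forwarded to spark-submit (the application file name is a placeholder):

    import configparser

    config = configparser.ConfigParser()
    config.read("fink_grb/conf/fink_grb.conf")

    # The hunk above shows "packages" in the section preceding [ADMIN];
    # look it up without assuming that section's name.
    section = next(s for s in config.sections() if "packages" in config[s])
    packages = config[section]["packages"]

    cmd = ["spark-submit", "--packages", packages, "my_app.py"]  # placeholder app
    print(" ".join(cmd))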
(Diff for the remaining 12 changed files not shown.)
