Switch to GH Actions (#415)
* add initial GH actions

* Fake a failure

* Add software installation steps

* Add software installation steps

* Add software installation steps: Spark

* Add software installation steps: Spark

* Add software installation steps: Spark

* Add software installation steps: use axel to speed-up

* Add software installation steps: switch to single quote

* Add software installation steps: switch to single quote

* Add test suite

* Add test suite

* Update pythonpath in tests

* Switch to python 3.6

* action for sonarqube

* action for sonarqube

* Add Slack integration

* Remove travis integration

* Add new linter for fink_science/ and bin/

* Remove slack integration for sonarqube (already millions of other notifications)

* Exclude coverage from Sonar
JulienPeloton authored Feb 3, 2021
1 parent 57f81f9 commit a5d992d
Showing 23 changed files with 191 additions and 382 deletions.
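
Several of the commit messages above mention switching to axel to speed up the software downloads. A minimal sketch of the difference, assuming axel is installed (the mirror, version, and flags follow the Sentinel workflow below):

# Sequential download over a single connection (wget default):
wget --quiet https://archive.apache.org/dist/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz

# Parallel download over 10 connections with axel, as in the workflow below:
axel -n10 --quiet https://archive.apache.org/dist/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz

axel splits the archive into segments fetched over several connections, which typically shortens the CI setup time for large downloads such as the Spark tarball.
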
99 changes: 99 additions & 0 deletions .github/workflows/full-test-broker.yml
@@ -0,0 +1,99 @@
name: Sentinel

on:
  push:
    branches:
      - master
  pull_request:

jobs:
  install-checks:
    runs-on: ubuntu-latest
    steps:
      - name: Check HBase 2.2.6 availability
        run: |
          wget --spider http://www-us.apache.org/dist/hbase/2.2.6/hbase-2.2.6-bin.tar.gz
      - name: Check Kafka 2.6.1 availability
        run: |
          wget --spider https://www.apache.org/dist/kafka/2.6.1/kafka_2.12-2.6.1.tgz
      - name: Check Spark 2.4.7 availability
        run: |
          wget --spider http://archive.apache.org/dist/spark/spark-2.4.7/spark-2.4.7-bin-hadoop2.7.tgz
  test-suite:
    needs: install-checks
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.6]
        spark-version: [2.4.7]
    env:
      SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install add-ons
        run: |
          sudo apt-get install axel
      - name: Set up env [1/2]
        run: |
          echo "FINK_HOME=$GITHUB_WORKSPACE" >> $GITHUB_ENV
      - name: Install Java 8
        run: |
          source conf/java8_for_xenial.sh
      - name: Install HBase 2.2.6
        run: |
          source conf/install_hbase.sh
      - name: Install Kafka 2.6.1
        run: |
          source conf/install_kafka.sh
          echo "KAFKA_HOME=$FINK_HOME/kafka" >> $GITHUB_ENV
      - name: Install Spark 2.4.7
        run: |
          axel -n10 --quiet https://archive.apache.org/dist/spark/spark-${{ matrix.spark-version }}/spark-${{ matrix.spark-version }}-bin-hadoop2.7.tgz
          tar -xf spark-${{ matrix.spark-version }}-bin-hadoop2.7.tgz
          echo "SPARK_HOME=$FINK_HOME/spark-${{ matrix.spark-version }}-bin-hadoop2.7" >> $GITHUB_ENV
      - name: Download test data
        run: |
          cd datasim
          source download_ztf_alert_data.sh
          cd ..
          echo "FINK_ALERT_SIMULATOR=${FINK_HOME}/fink-alert-simulator" >> $GITHUB_ENV
      - name: Download simulator
        run: |
          git clone https://github.com/astrolabsoftware/fink-alert-simulator.git
      - name: Set up env [2/2]
        run: |
          echo "SPARKLIB=${SPARK_HOME}/python:${SPARK_HOME}/python/lib/py4j-0.10.7-src.zip" >> $GITHUB_ENV
          echo "$HOME/.local/bin:${FINK_HOME}/bin" >> $GITHUB_PATH
          echo "${SPARK_HOME}/bin:${SPARK_HOME}/sbin" >> $GITHUB_PATH
          echo "$FINK_ALERT_SIMULATOR/bin" >> $GITHUB_PATH
          echo "spark.yarn.jars=${SPARK_HOME}/jars/*.jar" > ${SPARK_HOME}/conf/spark-defaults.conf
      - name: Install Python dependencies
        run: |
          pip install --upgrade pip setuptools wheel
          source ./install_python_deps.sh
          echo "PYTHONPATH="${SPARKLIB}:${FINK_HOME}:${FINK_ALERT_SIMULATOR}"" >> $GITHUB_ENV
      - name: Check env
        run: |
          echo "FINK_HOME: $FINK_HOME"
          echo "SPARK_HOME: $SPARK_HOME"
          echo "SPARKLIB: $SPARKLIB"
          echo "FINK_ALERT_SIMULATOR: $FINK_ALERT_SIMULATOR"
          echo "KAFKA_HOME: $KAFKA_HOME"
          echo "PYTHONPATH: $PYTHONPATH"
      - name: Run test suites
        run: |
          git fetch --unshallow --quiet
          fink init -c ${FINK_HOME}/conf/fink.conf.travis
          fink_kafka start
          fink_kafka --create-topic fink_outstream
          fink_simulator --docker -c ${FINK_HOME}/conf/fink_alert_simulator.conf
          fink_test
          bash <(curl -s https://codecov.io/bash)
      - uses: act10ns/slack@v1
        with:
          status: ${{ job.status }}
        if: always()
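
The workflow above shares its environment between steps through the files GitHub Actions exposes as $GITHUB_ENV and $GITHUB_PATH. A minimal sketch of that pattern (illustrative only; the variable names come from the steps above):

# In one step: append KEY=value lines to $GITHUB_ENV. They become environment
# variables for all *subsequent* steps of the job, not for the current one.
echo "FINK_HOME=$GITHUB_WORKSPACE" >> $GITHUB_ENV

# In a later step: $FINK_HOME is now defined, so its bin/ directory can be
# appended to $GITHUB_PATH to extend PATH for the steps that follow.
echo "${FINK_HOME}/bin" >> $GITHUB_PATH

This is why the environment setup is split into "Set up env [1/2]" and "Set up env [2/2]": FINK_HOME must be written in an earlier step before later steps can reference it.
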
32 changes: 32 additions & 0 deletions .github/workflows/linter.yml
@@ -0,0 +1,32 @@
name: PEP8

on:
  push:
    branches:
      - master
  pull_request:

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.6, 3.7]

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8
      - name: fink-broker
        run: |
          flake8 fink_broker/*.py --count --show-source --statistics --ignore=E302,E501
      - name: bin
        run: |
          flake8 bin/*.py --count --show-source --statistics --ignore=E302,E501
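
To reproduce the PEP8 check locally before pushing, the same flake8 invocation can be run from the repository root (a sketch, assuming flake8 is installed; the ignore list matches the workflow above, with both trees linted in one call):

# Install the linter, then lint fink_broker/ and bin/ with the CI exclusions
pip install flake8
flake8 fink_broker/*.py bin/*.py --count --show-source --statistics --ignore=E302,E501
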
29 changes: 29 additions & 0 deletions .github/workflows/sonarqube.yml
@@ -0,0 +1,29 @@
name: SonarQube

on:
  push:
    branches:
      - master
  pull_request:

jobs:
  sonarcloud:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting
          fetch-depth: 0
      - name: SonarCloud Scan
        uses: sonarsource/sonarcloud-github-action@master
        with:
          args: >
            -Dsonar.organization=astrolabsoftware
            -Dsonar.projectKey=finkbroker
            -Dsonar.sources=fink_broker/,bin/
            -Dsonar.test.exclusions=fink_broker/htmlcov,fink_broker/slackUtils.py
            -Dsonar.verbose=true
            -Dsonar.coverage.exclusions=**/**
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
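
For reference, the same analysis parameters could be passed to a local sonar-scanner run; a hedged sketch, not part of the commit, assuming the sonar-scanner CLI is installed and a SONAR_TOKEN is exported in the environment:

# Run the analysis locally against SonarCloud with the parameters used above
sonar-scanner \
  -Dsonar.host.url=https://sonarcloud.io \
  -Dsonar.login=${SONAR_TOKEN} \
  -Dsonar.organization=astrolabsoftware \
  -Dsonar.projectKey=finkbroker \
  -Dsonar.sources=fink_broker/,bin/ \
  -Dsonar.test.exclusions=fink_broker/htmlcov,fink_broker/slackUtils.py \
  -Dsonar.coverage.exclusions='**/**'
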
87 changes: 0 additions & 87 deletions .travis.yml

This file was deleted.

2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@

[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=finkbroker&metric=alert_status)](https://sonarcloud.io/dashboard?id=finkbroker) [![Maintainability Rating](https://sonarcloud.io/api/project_badges/measure?project=finkbroker&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=finkbroker)
[![Build Status](https://travis-ci.org/astrolabsoftware/fink-broker.svg?branch=master)](https://travis-ci.org/astrolabsoftware/fink-broker)
[![codecov](https://codecov.io/gh/astrolabsoftware/fink-broker/branch/master/graph/badge.svg)](https://codecov.io/gh/astrolabsoftware/fink-broker) [![Documentation Status](https://readthedocs.org/projects/fink-broker/badge/?version=latest)](https://fink-broker.readthedocs.io/en/latest/?badge=latest)
[![PEP8](https://github.com/astrolabsoftware/fink-broker/workflows/PEP8/badge.svg)](https://github.com/astrolabsoftware/fink-broker/workflows/PEP8)[![codecov](https://codecov.io/gh/astrolabsoftware/fink-broker/branch/master/graph/badge.svg)](https://codecov.io/gh/astrolabsoftware/fink-broker) [![Documentation Status](https://readthedocs.org/projects/fink-broker/badge/?version=latest)](https://fink-broker.readthedocs.io/en/latest/?badge=latest)

Fink is a broker infrastructure enabling a wide range of applications and services to connect to large streams of alerts issued from telescopes all over the world.

3 changes: 1 addition & 2 deletions bin/access_science_db.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2019 AstroLab Software
# Copyright 2019-2021 AstroLab Software
# Author: Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,7 +16,6 @@
"""Access the science database, and read alerts. HBase must be installed.
"""
import argparse
import time
import json

from fink_broker.parser import getargs
4 changes: 1 addition & 3 deletions bin/checkstream.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2019 AstroLab Software
# Copyright 2019-2021 AstroLab Software
# Author: Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,8 +15,6 @@
# limitations under the License.
"""Monitor Kafka stream received by Spark
"""
from pyspark.sql import SparkSession

import argparse
import time

4 changes: 1 addition & 3 deletions bin/distribute.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2019 AstroLab Software
# Copyright 2019-2021 AstroLab Software
# Author: Abhishek Chauhan, Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,8 +21,6 @@
3. Serialize into Avro
3. Publish to Kafka Topic(s)
"""
from pyspark.sql.functions import lit

import argparse
import time

4 changes: 1 addition & 3 deletions bin/distribution_test.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2019 AstroLab Software
# Copyright 2019-2021 AstroLab Software
# Author: Abhishek Chauhan
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,8 +21,6 @@
2. Deserialize the avro data using the pre-defined schema
3. Carry out operations on the obtained DataFrame
"""
from pyspark.sql.functions import col

import argparse
import time

4 changes: 1 addition & 3 deletions bin/index_archival.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2020 AstroLab Software
# Copyright 2020-2021 AstroLab Software
# Author: Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,8 +25,6 @@
from pyspark.sql.functions import arrays_zip, explode

import argparse
import time
import json

from fink_broker import __version__ as fbvsn
from fink_broker.parser import getargs
6 changes: 1 addition & 5 deletions bin/merge_ztf_night.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2020 AstroLab Software
# Copyright 2020-2021 AstroLab Software
# Author: Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,13 +15,9 @@
# limitations under the License.
"""Retrieve one ZTF day, and merge small files into larger ones.
"""
from pyspark.sql import DataFrame
from pyspark.sql import functions as F

import argparse
import time
import json
import subprocess

from fink_broker.parser import getargs
from fink_broker.sparkUtils import init_sparksession
4 changes: 1 addition & 3 deletions bin/raw2science.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/usr/bin/env python
# Copyright 2019-2020 AstroLab Software
# Copyright 2019-2021 AstroLab Software
# Author: Julien Peloton
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -22,12 +22,10 @@
See http://cdsxmatch.u-strasbg.fr/ for more information on the SIMBAD catalog.
"""
from pyspark.sql import DataFrame
from pyspark.sql import functions as F

import argparse
import time
import os

from fink_broker import __version__ as fbvsn
from fink_broker.parser import getargs