remove singularity from vcell CI/CD (keep it in CLI/biosimulations)
jcschaff committed Sep 26, 2024 · 1 parent dd8f83e · commit 8ebbdba
Showing 4 changed files with 8 additions and 94 deletions.
22 changes: 5 additions & 17 deletions .github/workflows/CI-full.yml
@@ -94,34 +94,22 @@ jobs:
java-version: '17'
cache: 'maven'

- name: Install Singularity # to make singularity image for cluster
uses: eWaterCycle/setup-singularity@v6
with:
singularity-version: 3.7.1
- name: build and publish all images
shell: bash
run: |
cd docker/build
sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
./build.sh all ${{ env.VCELL_REPO_NAMESPACE }} ${{ env.VCELL_TAG }}
cd singularity-vm
singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
- name: tag as latest and push to registry # (jcs) are explicit singularity push commands redundant? (see ./build.sh)
- name: tag as latest and push to registry
shell: bash
run: |
for CONTAINER in vcell-api vcell-rest vcell-webapp-prod vcell-webapp-dev vcell-webapp-stage vcell-webapp-island vcell-batch vcell-opt vcell-clientgen vcell-data vcell-db vcell-mongo vcell-sched vcell-submit vcell-admin;\
do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
done
cd docker/build/singularity-vm
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${VCELL_TAG}
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ steps.version.outputs.tag }}
singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:latest
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${VCELL_TAG}
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ steps.version.outputs.tag }}
singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:latest
- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
26 changes: 2 additions & 24 deletions .github/workflows/site_deploy.yml
@@ -182,20 +182,6 @@ jobs:
ssh-keyscan $VCELL_MANAGER_NODE >> ~/.ssh/known_hosts
cd docker/swarm
scp ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE}:${VCELL_DEPLOY_REMOTE_DIR}/${VCELL_CONFIG_FILE_NAME} .
- name: install singularity
uses: eWaterCycle/setup-singularity@v6
with:
singularity-version: 3.7.1
- name: retrieve batch and opt singularity images
run: |
set -ux
cd docker/swarm
export BATCH_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_BATCH_SINGULARITY_FILENAME | cut -d"=" -f2`
export OPT_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_OPT_SINGULARITY_FILENAME | cut -d"=" -f2`
cd ../build/singularity-vm
singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
singularity pull $BATCH_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
singularity pull $OPT_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
- name: setup java 17 with maven cache (for documentation build)
uses: actions/setup-java@v4
with:
@@ -207,16 +193,15 @@
run: |
set -ux
mvn clean install -DskipTests
- name: deploy installers and singularity to kubernetes site and web help to vcell.org
- name: deploy installers and web help to vcell.org
run: |
set -ux
cd docker/swarm
ssh -t ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
if ${{ github.event.inputs.server_only != 'true' }}; then
# build and install the client installers, the singularity images, and the web help (kubernetes cluster deployments are separate)
# build and install the client installers, and the web help (kubernetes cluster deployments are separate)
./deploy-action-kubernetes.sh \
--ssh-user ${{ secrets.CD_FULL_USER }} \
--install-singularity \
--build-installers \
--installer-deploy-dir $VCELL_INSTALLER_REMOTE_DIR \
--webhelp-local-dir ../../vcell-client/target/classes/vcellDoc \
@@ -227,13 +212,6 @@
ssh ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} \
installer_deploy_dir=$VCELL_INSTALLER_REMOTE_DIR vcell_siteCamel=$VCELL_SITE_CAMEL vcell_version=$VCELL_VERSION vcell_build=$VCELL_BUILD \
'bash -s' < link-installers.sh
else
# build and install only the singularity images (kubernetes cluster deployments are separate)
./deploy-action-kubernetes.sh \
--ssh-user ${{ secrets.CD_FULL_USER }} \
--install-singularity \
${VCELL_MANAGER_NODE} \
./${VCELL_CONFIG_FILE_NAME}
fi
- name: Capitalize first character of site name
id: capitalize
1 change: 0 additions & 1 deletion docker/build/Dockerfile-submit-dev
@@ -71,7 +71,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
htc_vcellbatch_docker_name="htc-vcellbatch-docker-name-not-set" \
htc_vcellbatch_solver_list="htc-vcellbatch-solver-list-not-set" \
htc_vcellopt_docker_name="htc-vcellopt-docker-name-not-set" \
opt_singularity_imagefile=/path/to/external/opt/singularity_opt.img \
batchhost="batch-host-not-set" \
batchuser="batch-user-not-set" \
slurm_cmd_sbatch=sbatch \
53 changes: 1 addition & 52 deletions docker/swarm/deploy-action-kubernetes.sh
@@ -3,7 +3,7 @@
set -ux

show_help() {
echo "Deploys vcell client installers, webhelp and singularity images for a Kubernetes deploy"
echo "Deploys vcell client installers and webhelp for a Kubernetes deploy"
echo ""
echo "usage: deploy-action-kubernetes.sh [OPTIONS] REQUIRED-ARGUMENTS"
echo ""
@@ -30,14 +30,11 @@ show_help() {
echo " --webhelp-deploy-dir /remote/path/to/web/VCell_Help"
echo " directory for deployed html webhelp published on web server"
echo ""
echo " --install-singularity optionally install batch and opt singularity images on each compute node in 'vcell' SLURM partition"
echo ""
echo ""
echo "example:"
echo ""
echo "deploy-action-kubernetes.sh \\"
echo " --ssh-user vcell \\"
echo " --install_singularity \\"
echo " --build_installers --installer_deploy_dir /share/apps/vcell3/apache_webroot/htdocs/webstart/Alpha \\"
echo " --webhelp_local_dir ../../vcell-client/target/classes/vcellDoc \\"
echo " --webhelp_deploy_dir /share/apps/vcell3/apache_webroot/htdocs/webstart/VCell_Tutorials/VCell_Help \\"
@@ -55,7 +52,6 @@ installer_deploy_dir=
webhelp_local_dir=
webhelp_deploy_dir=
build_installers=false
install_singularity=false
while :; do
case $1 in
-h|--help)
@@ -78,9 +74,6 @@ while :; do
shift
webhelp_deploy_dir=$1
;;
--install-singularity)
install_singularity=true
;;
--build-installers)
build_installers=true
;;
@@ -106,50 +99,6 @@ local_config_file=$2
vcell_siteCamel=$(grep VCELL_SITE_CAMEL "$local_config_file" | cut -d"=" -f2)
vcell_version=$(grep VCELL_VERSION_NUMBER "$local_config_file" | cut -d"=" -f2)
vcell_build=$(grep VCELL_BUILD_NUMBER "$local_config_file" | cut -d"=" -f2)
batch_singularity_filename=$(grep VCELL_BATCH_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
opt_singularity_filename=$(grep VCELL_OPT_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
slurm_singularity_central_dir=$(grep VCELL_SLURM_CENTRAL_SINGULARITY_DIR "$local_config_file" | cut -d"=" -f2)


#
# install the singularity images on the cluster nodes
#
if [ "$install_singularity" == "true" ]; then

echo ""
pushd ../build/singularity-vm || (echo "pushd ../build/singularity-vm failed"; exit 1)
echo ""
echo "CURRENT DIRECTORY IS $PWD"

#
# get configuration from config file and load into current bash environment
#
echo ""

if [ ! -e "./${batch_singularity_filename}" ]; then
echo "failed to find local batch singularity image file $batch_singularity_filename in ./singularity-vm directory"
exit 1
fi

if ! scp "./${batch_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
echo "failed to copy batch singularity image to server"
exit 1
fi

if [ ! -e "./${opt_singularity_filename}" ]; then
echo "failed to find local opt singularity image file $opt_singularity_filename in ./singularity-vm directory"
exit 1
fi

if ! scp "./${opt_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
echo "failed to copy opt singularity image to server"
exit 1
fi

echo "popd"
popd || (echo "popd failed"; exit 1)
fi


#
# if --build-installers, then generate client installers, placing then in ./generated_installers
