diff --git a/.github/workflows/CI-full.yml b/.github/workflows/CI-full.yml
index ebfc08f9a1..cb5318ffea 100644
--- a/.github/workflows/CI-full.yml
+++ b/.github/workflows/CI-full.yml
@@ -94,10 +94,6 @@ jobs:
           java-version: '17'
           cache: 'maven'
 
-      - name: Install Singularity # to make singularity image for cluster
-        uses: eWaterCycle/setup-singularity@v6
-        with:
-          singularity-version: 3.7.1
       - name: build and publish all images
         shell: bash
         run: |
@@ -105,23 +101,15 @@ jobs:
           sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
           docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
           ./build.sh all ${{ env.VCELL_REPO_NAMESPACE }} ${{ env.VCELL_TAG }}
-          cd singularity-vm
-          singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
-      - name: tag as latest and push to registry # (jcs) are explicit singularity push commands redundant? (see ./build.sh)
+
+      - name: tag as latest and push to registry
         shell: bash
         run: |
           for CONTAINER in vcell-api vcell-rest vcell-webapp-prod vcell-webapp-dev vcell-webapp-stage vcell-webapp-island vcell-batch vcell-opt vcell-clientgen vcell-data vcell-db vcell-mongo vcell-sched vcell-submit vcell-admin;\
-            do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
-            docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
-            docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
+          do docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:latest;\
+          docker tag ${VCELL_REPO_NAMESPACE}/$CONTAINER:${VCELL_TAG} ${VCELL_REPO_NAMESPACE}/$CONTAINER:${{ steps.version.outputs.tag }};\
+          docker push --all-tags ${VCELL_REPO_NAMESPACE}/$CONTAINER;\
           done
-          cd docker/build/singularity-vm
-          singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${VCELL_TAG}
-          singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ steps.version.outputs.tag }}
-          singularity push -U $(ls *batch*img) oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:latest
-          singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${VCELL_TAG}
-          singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ steps.version.outputs.tag }}
-          singularity push -U $(ls *opt*img) oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:latest
 
       - name: Setup tmate session
         uses: mxschmitt/action-tmate@v3
diff --git a/.github/workflows/site_deploy.yml b/.github/workflows/site_deploy.yml
index 5a5895f274..8c7cf0cf90 100644
--- a/.github/workflows/site_deploy.yml
+++ b/.github/workflows/site_deploy.yml
@@ -182,20 +182,6 @@ jobs:
           ssh-keyscan $VCELL_MANAGER_NODE >> ~/.ssh/known_hosts
           cd docker/swarm
           scp ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE}:${VCELL_DEPLOY_REMOTE_DIR}/${VCELL_CONFIG_FILE_NAME} .
-      - name: install singularity
-        uses: eWaterCycle/setup-singularity@v6
-        with:
-          singularity-version: 3.7.1
-      - name: retrieve batch and opt singularity images
-        run: |
-          set -ux
-          cd docker/swarm
-          export BATCH_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_BATCH_SINGULARITY_FILENAME | cut -d"=" -f2`
-          export OPT_SINGULARITY_FILENAME=`cat $VCELL_CONFIG_FILE_NAME | grep VCELL_OPT_SINGULARITY_FILENAME | cut -d"=" -f2`
-          cd ../build/singularity-vm
-          singularity remote login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} oras://ghcr.io
-          singularity pull $BATCH_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-batch-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
-          singularity pull $OPT_SINGULARITY_FILENAME oras://${VCELL_REPO_NAMESPACE}/vcell-opt-singularity:${{ github.event.inputs.vcell_version }}.${{ github.event.inputs.vcell_build }}
       - name: setup java 17 with maven cache (for documentation build)
         uses: actions/setup-java@v4
         with:
@@ -207,16 +193,15 @@
         run: |
           set -ux
           mvn clean install -DskipTests
-      - name: deploy installers and singularity to kubernetes site and web help to vcell.org
+      - name: deploy installers and web help to vcell.org
         run: |
           set -ux
           cd docker/swarm
           ssh -t ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} sudo docker login -u ${{ secrets.ACTION_USER }} -p ${{ secrets.ACTION_TOKEN }} ghcr.io
           if ${{ github.event.inputs.server_only != 'true' }}; then
-            # build and install the client installers, the singularity images, and the web help (kubernetes cluster deployments are separate)
+            # build and install the client installers, and the web help (kubernetes cluster deployments are separate)
             ./deploy-action-kubernetes.sh \
               --ssh-user ${{ secrets.CD_FULL_USER }} \
-              --install-singularity \
               --build-installers \
               --installer-deploy-dir $VCELL_INSTALLER_REMOTE_DIR \
               --webhelp-local-dir ../../vcell-client/target/classes/vcellDoc \
@@ -227,13 +212,6 @@
           ssh ${{ secrets.CD_FULL_USER }}@${VCELL_MANAGER_NODE} \
             installer_deploy_dir=$VCELL_INSTALLER_REMOTE_DIR vcell_siteCamel=$VCELL_SITE_CAMEL vcell_version=$VCELL_VERSION vcell_build=$VCELL_BUILD \
             'bash -s' < link-installers.sh
-          else
-            # build and install only the singularity images (kubernetes cluster deployments are separate)
-            ./deploy-action-kubernetes.sh \
-              --ssh-user ${{ secrets.CD_FULL_USER }} \
-              --install-singularity \
-              ${VCELL_MANAGER_NODE} \
-              ./${VCELL_CONFIG_FILE_NAME}
           fi
       - name: Capitalize first character of site name
         id: capitalize
diff --git a/docker/build/Dockerfile-submit-dev b/docker/build/Dockerfile-submit-dev
index fd50b2cf06..b547ea2da7 100644
--- a/docker/build/Dockerfile-submit-dev
+++ b/docker/build/Dockerfile-submit-dev
@@ -71,7 +71,6 @@ ENV softwareVersion=SOFTWARE-VERSION-NOT-SET \
     htc_vcellbatch_docker_name="htc-vcellbatch-docker-name-not-set" \
     htc_vcellbatch_solver_list="htc-vcellbatch-solver-list-not-set" \
     htc_vcellopt_docker_name="htc-vcellopt-docker-name-not-set" \
-    opt_singularity_imagefile=/path/to/external/opt/singularity_opt.img \
     batchhost="batch-host-not-set" \
     batchuser="batch-user-not-set" \
     slurm_cmd_sbatch=sbatch \
diff --git a/docker/swarm/deploy-action-kubernetes.sh b/docker/swarm/deploy-action-kubernetes.sh
index 7ecce0a8a1..0d9ffd258a 100755
--- a/docker/swarm/deploy-action-kubernetes.sh
+++ b/docker/swarm/deploy-action-kubernetes.sh
@@ -3,7 +3,7 @@
 set -ux
 
 show_help() {
-    echo "Deploys vcell client installers, webhelp and singularity images for a Kubernetes deploy"
+    echo "Deploys vcell client installers and webhelp for a Kubernetes deploy"
     echo ""
     echo "usage: deploy-action-kubernetes.sh [OPTIONS] REQUIRED-ARGUMENTS"
     echo ""
@@ -30,14 +30,11 @@ show_help() {
     echo "  --webhelp-deploy-dir /remote/path/to/web/VCell_Help"
     echo "        directory for deployed html webhelp published on web server"
     echo ""
-    echo "  --install-singularity  optionally install batch and opt singularity images on each compute node in 'vcell' SLURM partition"
-    echo ""
     echo ""
     echo "example:"
     echo ""
     echo "deploy-action-kubernetes.sh \\"
     echo "  --ssh-user vcell \\"
-    echo "  --install_singularity \\"
     echo "  --build_installers --installer_deploy_dir /share/apps/vcell3/apache_webroot/htdocs/webstart/Alpha \\"
     echo "  --webhelp_local_dir ../../vcell-client/target/classes/vcellDoc \\"
     echo "  --webhelp_deploy_dir /share/apps/vcell3/apache_webroot/htdocs/webstart/VCell_Tutorials/VCell_Help \\"
@@ -55,7 +52,6 @@ installer_deploy_dir=
 webhelp_local_dir=
 webhelp_deploy_dir=
 build_installers=false
-install_singularity=false
 while :; do
     case $1 in
         -h|--help)
@@ -78,9 +74,6 @@ while :; do
             shift
             webhelp_deploy_dir=$1
             ;;
-        --install-singularity)
-            install_singularity=true
-            ;;
         --build-installers)
             build_installers=true
             ;;
@@ -106,50 +99,6 @@ local_config_file=$2
 vcell_siteCamel=$(grep VCELL_SITE_CAMEL "$local_config_file" | cut -d"=" -f2)
 vcell_version=$(grep VCELL_VERSION_NUMBER "$local_config_file" | cut -d"=" -f2)
 vcell_build=$(grep VCELL_BUILD_NUMBER "$local_config_file" | cut -d"=" -f2)
-batch_singularity_filename=$(grep VCELL_BATCH_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
-opt_singularity_filename=$(grep VCELL_OPT_SINGULARITY_FILENAME "$local_config_file" | cut -d"=" -f2)
-slurm_singularity_central_dir=$(grep VCELL_SLURM_CENTRAL_SINGULARITY_DIR "$local_config_file" | cut -d"=" -f2)
-
-
-#
-# install the singularity images on the cluster nodes
-#
-if [ "$install_singularity" == "true" ]; then
-
-    echo ""
-    pushd ../build/singularity-vm || (echo "pushd ../build/singularity-vm failed"; exit 1)
-    echo ""
-    echo "CURRENT DIRECTORY IS $PWD"
-
-    #
-    # get configuration from config file and load into current bash environment
-    #
-    echo ""
-
-    if [ ! -e "./${batch_singularity_filename}" ]; then
-        echo "failed to find local batch singularity image file $batch_singularity_filename in ./singularity-vm directory"
-        exit 1
-    fi
-
-    if ! scp "./${batch_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
-        echo "failed to copy batch singularity image to server"
-        exit 1
-    fi
-
-    if [ ! -e "./${opt_singularity_filename}" ]; then
-        echo "failed to find local opt singularity image file $opt_singularity_filename in ./singularity-vm directory"
-        exit 1
-    fi
-
-    if ! scp "./${opt_singularity_filename}" "$ssh_user@$manager_node:${slurm_singularity_central_dir}"; then
-        echo "failed to copy opt singularity image to server"
-        exit 1
-    fi
-
-    echo "popd"
-    popd || (echo "popd failed"; exit 1)
-fi
-
 
 #
 # if --build-installers, then generate client installers, placing then in ./generated_installers