From 67c050c201f92e855d09211e41a5f51ee1cb5230 Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Tue, 31 Oct 2023 19:00:47 +0000 Subject: [PATCH 01/14] Add missing export for rCDUMP in stage_ic (#2009) rCDUMP is needed by the exscript but was never exported by the jjob. --- jobs/JGLOBAL_STAGE_IC | 1 + 1 file changed, 1 insertion(+) diff --git a/jobs/JGLOBAL_STAGE_IC b/jobs/JGLOBAL_STAGE_IC index 317231871e..4c94990fde 100755 --- a/jobs/JGLOBAL_STAGE_IC +++ b/jobs/JGLOBAL_STAGE_IC @@ -7,6 +7,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "stage_ic" -c "base stage_ic" # shellcheck disable=SC2153 rCDUMP=${CDUMP} [[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" +export rCDUMP # Execute the Script "${HOMEgfs}/scripts/exglobal_stage_ic.sh" From 517b92f9c20c9e44abba3183325e35a95a7334a5 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Tue, 31 Oct 2023 16:33:11 -0400 Subject: [PATCH 02/14] Parallelize the build scripts (#1998) Optimized/parallelized build scripts. A few notes: 1) The default number of build jobs is 20, but one could argue for using 40. When researching this, I looked up what the SRW is compiling with. That system uses 40 cores, which seems a little excessive, but on testing the global workflow, the actual number of cores being used at any given time rarely exceeds 16 when running with 40 cores. This is because the builds tend to use multiple threads in the beginning when compiling low-level modules while the higher-level modules are more or less serial AND because the GDASApp takes several minutes to initialize all of its subrepositories by which time the smaller builds are complete. 2) I also updated checkout.sh so that all checkouts are simultaneous. The CPU load for `git submodule` is quite low, so running 16 instead of 8 jobs at once is not much more expensive. 3) To make this work, I had to add `-j` options to most of the build scripts. 
The only exception is build_upp, for which the build script within the UPP is hard coded to use 6 cores. 4) I fixed a few small bugs in the build scripts along the way. 5) Lastly, this reduces the total build time from ~2.5 hours for the entire system (including GDAS and GSI in the same build) to ~40 minutes when running with `-j 40`. Resolves #1978 --- sorc/build_all.sh | 293 +++++++++++++++++++------------------- sorc/build_gdas.sh | 3 +- sorc/build_gfs_utils.sh | 8 +- sorc/build_gsi_enkf.sh | 5 +- sorc/build_gsi_monitor.sh | 5 +- sorc/build_gsi_utils.sh | 5 +- sorc/build_ufs.sh | 13 +- sorc/build_ufs_utils.sh | 22 ++- sorc/build_upp.sh | 2 - sorc/build_ww3prepost.sh | 8 +- sorc/checkout.sh | 2 +- sorc/gfs_build.cfg | 2 +- sorc/partial_build.sh | 3 +- 13 files changed, 197 insertions(+), 174 deletions(-) diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 19f1ae6c92..95183f9065 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -16,13 +16,15 @@ function _usage() { Builds all of the global-workflow components by calling the individual build scripts in sequence. 
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-v] +Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v] -a UFS_app: Build a specific UFS app instead of the default -c build_config: Selectively build based on the provided config instead of the default config -h: print this help message and exit + -j: + Specify maximum number of build jobs (n) -v: Execute all build scripts with -v option to turn on verbose where supported EOF @@ -33,25 +35,25 @@ script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) cd "${script_dir}" || exit 1 _build_ufs_opt="" -_ops_opt="" _verbose_opt="" _partial_opt="" +_build_job_max=20 # Reset option counter in case this script is sourced OPTIND=1 -while getopts ":a:c:hov" option; do +while getopts ":a:c:j:hv" option; do case "${option}" in a) _build_ufs_opt+="-a ${OPTARG} ";; c) _partial_opt+="-c ${OPTARG} ";; h) _usage;; - o) _ops_opt+="-o";; + j) _build_job_max="${OPTARG} ";; v) _verbose_opt="-v";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage + _usage ;; *) echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage + _usage ;; esac done @@ -105,170 +107,161 @@ ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} # shellcheck disable= err=0 -#------------------------------------ -# build gfs_utils -#------------------------------------ -if [[ ${Build_gfs_utils} == 'true' ]]; then - echo " .... Building gfs_utils .... " - # shellcheck disable=SC2086,SC2248 - ./build_gfs_utils.sh ${_verbose_opt} > "${logs_dir}/build_gfs_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gfs_utils." 
- echo "The log file is in ${logs_dir}/build_gfs_utils.log" - fi - err=$((err + rc)) -fi +declare -A build_jobs +declare -A build_opts #------------------------------------ -# build WW3 pre & post execs +# Check which builds to do and assign # of build jobs #------------------------------------ -if [[ ${Build_ww3_prepost} == "true" ]]; then - echo " .... Building WW3 pre and post execs .... " - # shellcheck disable=SC2086,SC2248 - ./build_ww3prepost.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ww3_prepost.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building WW3 pre/post processing." - echo "The log file is in ${logs_dir}/build_ww3_prepost.log" - fi - err=$((err + rc)) -fi -#------------------------------------ -# build forecast model -#------------------------------------ +# Mandatory builds, unless otherwise specified, for the UFS +big_jobs=0 if [[ ${Build_ufs_model} == 'true' ]]; then - echo " .... Building forecast model .... " - # shellcheck disable=SC2086,SC2248 - ./build_ufs.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ufs.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building UFS model." - echo "The log file is in ${logs_dir}/build_ufs.log" - fi - err=$((err + rc)) + build_jobs["ufs"]=8 + big_jobs=$((big_jobs+1)) + build_opts["ufs"]="${_verbose_opt} ${_build_ufs_opt}" fi - -#------------------------------------ -# build GSI and EnKF - optional checkout -#------------------------------------ -if [[ -d gsi_enkf.fd ]]; then - if [[ ${Build_gsi_enkf} == 'true' ]]; then - echo " .... Building gsi and enkf .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_enkf.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_enkf.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi_enkf." - echo "The log file is in ${logs_dir}/build_gsi_enkf.log" - fi - err=$((err + rc)) - fi -else - echo " .... 
Skip building gsi and enkf .... " +# The UPP is hardcoded to use 6 cores +if [[ ${Build_upp} == 'true' ]]; then + build_jobs["upp"]=6 + build_opts["upp"]="" fi - -#------------------------------------ -# build gsi utilities -#------------------------------------ -if [[ -d gsi_utils.fd ]]; then - if [[ ${Build_gsi_utils} == 'true' ]]; then - echo " .... Building gsi utilities .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_utils.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi utilities." - echo "The log file is in ${logs_dir}/build_gsi_utils.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi utilities .... " +if [[ ${Build_ufs_utils} == 'true' ]]; then + build_jobs["ufs_utils"]=3 + build_opts["ufs_utils"]="${_verbose_opt}" +fi +if [[ ${Build_gfs_utils} == 'true' ]]; then + build_jobs["gfs_utils"]=1 + build_opts["gfs_utils"]="${_verbose_opt}" +fi +if [[ ${Build_ww3prepost} == "true" ]]; then + build_jobs["ww3prepost"]=3 + build_opts["ww3prepost"]="${_verbose_opt} ${_build_ufs_opt}" fi -#------------------------------------ -# build gdas - optional checkout -#------------------------------------ +# Optional DA builds if [[ -d gdas.cd ]]; then - if [[ ${Build_gdas} == 'true' ]]; then - echo " .... Building GDASApp .... " - # shellcheck disable=SC2086,SC2248 - ./build_gdas.sh ${_verbose_opt} > "${logs_dir}/build_gdas.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building GDASApp." - echo "The log file is in ${logs_dir}/build_gdas.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building GDASApp .... 
" + build_jobs["gdas"]=16 + big_jobs=$((big_jobs+1)) + build_opts["gdas"]="${_verbose_opt}" +fi +if [[ -d gsi_enkf.fd ]]; then + build_jobs["gsi_enkf"]=8 + big_jobs=$((big_jobs+1)) + build_opts["gsi_enkf"]="${_verbose_opt}" +fi +if [[ -d gsi_utils.fd ]]; then + build_jobs["gsi_utils"]=2 + build_opts["gsi_utils"]="${_verbose_opt}" fi - -#------------------------------------ -# build gsi monitor -#------------------------------------ if [[ -d gsi_monitor.fd ]]; then - if [[ ${Build_gsi_monitor} == 'true' ]]; then - echo " .... Building gsi monitor .... " - # shellcheck disable=SC2086,SC2248 - ./build_gsi_monitor.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_monitor.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building gsi monitor." - echo "The log file is in ${logs_dir}/build_gsi_monitor.log" - fi - err=$((err + rc)) - fi -else - echo " .... Skip building gsi monitor .... " + build_jobs["gsi_monitor"]=1 + build_opts["gsi_monitor"]="${_verbose_opt}" fi -#------------------------------------ -# build UPP -#------------------------------------ -if [[ ${Build_upp} == 'true' ]]; then - echo " .... Building UPP .... " - # shellcheck disable=SC2086,SC2248 - ./build_upp.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_upp.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building UPP." 
- echo "The log file is in ${logs_dir}/build_upp.log" - fi - err=$((err + rc)) -fi +# Go through all builds and adjust CPU counts down if necessary +requested_cpus=0 +build_list="" +for build in "${!build_jobs[@]}"; do + if [[ -z "${build_list}" ]]; then + build_list="${build}" + else + build_list="${build_list}, ${build}" + fi + if [[ ${build_jobs[${build}]} -gt ${_build_job_max} ]]; then + build_jobs[${build}]=${_build_job_max} + fi + requested_cpus=$(( requested_cpus + build_jobs[${build}] )) +done -#------------------------------------ -# build ufs_utils -#------------------------------------ -if [[ ${Build_ufs_utils} == 'true' ]]; then - echo " .... Building ufs_utils .... " - # shellcheck disable=SC2086,SC2248 - ./build_ufs_utils.sh ${_verbose_opt} > "${logs_dir}/build_ufs_utils.log" 2>&1 - # shellcheck disable= - rc=$? - if (( rc != 0 )) ; then - echo "Fatal error in building ufs_utils." - echo "The log file is in ${logs_dir}/build_ufs_utils.log" - fi - err=$((err + rc)) +echo "Building ${build_list}" + +# Go through all builds and adjust CPU counts up if possible +if [[ ${requested_cpus} -lt ${_build_job_max} && ${big_jobs} -gt 0 ]]; then + # Add cores to the gdas, ufs, and gsi build jobs + extra_cores=$(( _build_job_max - requested_cpus )) + extra_cores=$(( extra_cores / big_jobs )) + for build in "${!build_jobs[@]}"; do + if [[ "${build}" == "gdas" || "${build}" == "ufs" || "${build}" == "gsi_enkf" ]]; then + build_jobs[${build}]=$(( build_jobs[${build}] + extra_cores )) + fi + done fi +procs_in_use=0 +declare -A build_ids + +builds_started=0 +# Now start looping through all of the jobs until everything is done +while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do + for build in "${!build_jobs[@]}"; do + # Has the job started? + if [[ -n "${build_jobs[${build}]+0}" && -z "${build_ids[${build}]+0}" ]]; then + # Do we have enough processors to run it? 
+ if [[ ${_build_job_max} -ge $(( build_jobs[build] + procs_in_use )) ]]; then + if [[ "${build}" != "upp" ]]; then + "./build_${build}.sh" -j "${build_jobs[${build}]}" "${build_opts[${build}]:-}" > \ + "${logs_dir}/build_${build}.log" 2>&1 & + else + "./build_${build}.sh" "${build_opts[${build}]}" > \ + "${logs_dir}/build_${build}.log" 2>&1 & + fi + build_ids["${build}"]=$! + echo "Starting build_${build}.sh" + procs_in_use=$(( procs_in_use + build_jobs[${build}] )) + fi + fi + done + + # Check if all builds have completed + # Also recalculate how many processors are in use to account for completed builds + builds_started=0 + procs_in_use=0 + for build in "${!build_jobs[@]}"; do + # Has the build started? + if [[ -n "${build_ids[${build}]+0}" ]]; then + builds_started=$(( builds_started + 1)) + # Calculate how many processors are in use + # Is the build still running? + if ps -p "${build_ids[${build}]}" > /dev/null; then + procs_in_use=$(( procs_in_use + build_jobs["${build}"] )) + fi + fi + done + + sleep 5s +done + +# Wait for all jobs to complete and check return statuses +errs=0 +while [[ ${#build_jobs[@]} -gt 0 ]]; do + for build in "${!build_jobs[@]}"; do + # Test if each job is complete and if so, notify and remove from the array + if [[ -n "${build_ids[${build}]+0}" ]]; then + if ! ps -p "${build_ids[${build}]}" > /dev/null; then + wait "${build_ids[${build}]}" + build_stat=$? + errs=$((errs+build_stat)) + if [[ ${build_stat} == 0 ]]; then + echo "build_${build}.sh completed successfully!" + else + echo "build_${build}.sh failed with status ${build_stat}!" 
+ fi + + # Remove the completed build from the list of PIDs + unset 'build_ids[${build}]' + unset 'build_jobs[${build}]' + fi + fi + done + + sleep 5s +done + #------------------------------------ # Exception Handling #------------------------------------ -if (( err != 0 )); then +if (( errs != 0 )); then cat << EOF BUILD ERROR: One or more components failed to build Check the associated build log(s) for details. diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh index 39cf5ac9a7..b1a17c33dd 100755 --- a/sorc/build_gdas.sh +++ b/sorc/build_gdas.sh @@ -2,9 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; + j) export BUILD_JOBS=${OPTARG};; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh index 2a7a611239..09bd4a9656 100755 --- a/sorc/build_gfs_utils.sh +++ b/sorc/build_gfs_utils.sh @@ -5,11 +5,13 @@ function usage() { cat << EOF Builds the GFS utility programs. 
-Usage: ${BASH_SOURCE[0]} [-d][-h][-v] +Usage: ${BASH_SOURCE[0]} [-d][-h][-j n][-v] -d: Build with debug options -h: Print this help message and exit + -j: + Build with n build jobs -v: Turn on verbose output EOF @@ -19,10 +21,11 @@ EOF cwd=$(pwd) OPTIND=1 -while getopts ":dvh" option; do +while getopts ":j:dvh" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; v) export BUILD_VERBOSE="YES";; + j) export BUILD_JOBS="${OPTARG}";; h) usage ;; @@ -40,6 +43,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gfs_utils.fd/ush/build.sh" exit diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh index 671c3d6205..9ba278e3ec 100755 --- a/sorc/build_gsi_enkf.sh +++ b/sorc/build_gsi_enkf.sh @@ -2,10 +2,10 @@ set -eux OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -21,6 +21,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ GSI_MODE=GFS \ ENKF_MODE=GFS \ REGRESSION_TESTS=NO \ diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh index ec3645e52f..3de1262aac 100755 --- a/sorc/build_gsi_monitor.sh +++ b/sorc/build_gsi_monitor.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ "${cwd}/gsi_monitor.fd/ush/build.sh" exit diff --git 
a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh index bcbc110cf6..81eab0f628 100755 --- a/sorc/build_gsi_utils.sh +++ b/sorc/build_gsi_utils.sh @@ -4,10 +4,10 @@ set -eux cwd=$(pwd) OPTIND=1 -while getopts ":dov" option; do +while getopts ":j:dv" option; do case "${option}" in d) export BUILD_TYPE="DEBUG";; - o) _ops="YES";; # TODO - unused; remove? + j) export BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -23,6 +23,7 @@ shift $((OPTIND-1)) BUILD_TYPE=${BUILD_TYPE:-"Release"} \ BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS=${BUILD_JOBS:-8} \ UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ "${cwd}/gsi_utils.fd/ush/build.sh" diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index aa7183c006..aaeeefea23 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -7,10 +7,11 @@ cwd=$(pwd) APP="S2SWA" CCPP_SUITES="FV3_GFS_v17_p8,FV3_GFS_v17_coupled_p8" # TODO: does the g-w need to build with all these CCPP_SUITES? -while getopts ":da:v" option; do +while getopts ":da:j:v" option; do case "${option}" in d) BUILD_TYPE="DEBUG";; - a) APP="${OPTARG}" ;; + a) APP="${OPTARG}";; + j) BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -38,21 +39,23 @@ if [[ "${MACHINE_ID}" != "noaacloud" ]]; then mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua else - + if [[ "${PW_CSP:-}" == "aws" ]]; then + set +x # TODO: This will need to be addressed further when the EPIC stacks are available/supported. 
module use /contrib/spack-stack/envs/ufswm/install/modulefiles/Core module load stack-intel module load stack-intel-oneapi-mpi module load ufs-weather-model-env/1.0.0 - # TODO: It is still uncertain why this is the only module that is + # TODO: It is still uncertain why this is the only module that is # missing; check the spack build as this needed to be added manually. module load w3emc/2.9.2 # TODO: This has similar issues for the EPIC stack. module list + set -x fi export CMAKE_FLAGS="${MAKE_OPT}" - ./build.sh + BUILD_JOBS=${BUILD_JOBS:-8} ./build.sh mv "${cwd}/ufs_model.fd/build/ufs_model" "${cwd}/ufs_model.fd/tests/ufs_model.x" fi diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index 5e2edf0737..e78ca3c180 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -1,10 +1,30 @@ #! /usr/bin/env bash set -eux +OPTIND=1 +while getopts ":j:dv" option; do + case "${option}" in + j) export BUILD_JOBS="${OPTARG}";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + script_dir=$(dirname "${BASH_SOURCE[0]}") cd "${script_dir}/ufs_utils.fd" || exit 1 -CMAKE_OPTS="-DGFS=ON" ./build_all.sh +CMAKE_OPTS="-DGFS=ON" \ +BUILD_JOBS=${BUILD_JOBS:-8} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-} \ +./build_all.sh exit diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh index a00650ba16..a55e96ebc8 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -12,11 +12,9 @@ while getopts ":dv" option; do v) _opts+="-v ";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" - usage ;; *) echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" - usage ;; esac done diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh index 7024a7bba1..919afaacb3 100755 --- a/sorc/build_ww3prepost.sh +++ b/sorc/build_ww3prepost.sh @@ -7,9 +7,10 @@ cd "${script_dir}" || exit 1 # Default 
settings APP="S2SWA" -while getopts "a:v" option; do +while getopts ":j:a:v" option; do case "${option}" in - a) APP="${OPTARG}" ;; + a) APP="${OPTARG}";; + j) BUILD_JOBS="${OPTARG}";; v) export BUILD_VERBOSE="YES";; :) echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" @@ -58,6 +59,7 @@ post_exes="ww3_outp ww3_outf ww3_outp ww3_gint ww3_ounf ww3_ounp ww3_grib" #create build directory: path_build="${WW3_DIR}/build_SHRD" +[[ -d "${path_build}" ]] && rm -rf "${path_build}" mkdir -p "${path_build}" || exit 1 cd "${path_build}" || exit 1 echo "Forcing a SHRD build" @@ -85,7 +87,7 @@ if (( rc != 0 )); then echo "Fatal error in cmake." exit "${rc}" fi -make -j 8 +make -j "${BUILD_JOBS:-8}" rc=$? if (( rc != 0 )); then echo "Fatal error in make." diff --git a/sorc/checkout.sh b/sorc/checkout.sh index a756c8d040..de4fcdf838 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 # Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" ; errs=$((errs + $?)) +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" & # Run all other checkouts simultaneously with just 1 core each to handle submodules. checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" & diff --git a/sorc/gfs_build.cfg b/sorc/gfs_build.cfg index 4dd9b1b74e..8c171072d0 100644 --- a/sorc/gfs_build.cfg +++ b/sorc/gfs_build.cfg @@ -2,7 +2,7 @@ # ***** configuration of global-workflow build ***** Building ufs_model (ufs_model) ........................ yes - Building ww3_prepost (ww3_prepost) .................... yes + Building ww3prepost (ww3prepost) ...................... yes Building gsi_enkf (gsi_enkf) .......................... yes Building gsi_utils (gsi_utils) ........................ 
yes Building gsi_monitor (gsi_monitor) .................... yes diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index 64f1a705ec..34b8b557ce 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -3,11 +3,10 @@ # define the array of the name of build program # declare -a Build_prg=("Build_ufs_model" \ - "Build_ww3_prepost" \ + "Build_ww3prepost" \ "Build_gsi_enkf" \ "Build_gsi_utils" \ "Build_gsi_monitor" \ - "Build_ww3_prepost" \ "Build_gdas" \ "Build_upp" \ "Build_ufs_utils" \ From 77be0ec6a3117b9f7e71a19c69ac64c09b521ecd Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Wed, 1 Nov 2023 12:09:33 -0600 Subject: [PATCH 03/14] No longer write archive lists COM (#2000) Changes the respective `.txt` files used for the HPSS archive to the `DATA` path. Each defined text file has been updated to explicitly write to `DATA` rather than `COM` such that the files are deleted rather than being archived. Resolves #760 --- scripts/exglobal_archive.sh | 12 +- ush/hpssarch_gen.sh | 214 ++++++++++++++++++------------------ 2 files changed, 110 insertions(+), 116 deletions(-) diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index dcc864e223..18217f4efc 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -154,11 +154,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then mod=$((nday % ARCH_FCSTICFREQ)) if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi - - ARCH_LIST="${DATA}/archlist" - [[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" - mkdir -p "${ARCH_LIST}" - cd "${ARCH_LIST}" || exit 2 + cd "${DATA}" || exit 2 "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" status=$? @@ -196,7 +192,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then if [ "${DO_AERO}" = "YES" ]; then for targrp in chem; do # TODO: Why is this tar being done here instead of being added to the list? 
- ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${DATA}/${targrp}.txt") status=$? if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" @@ -276,7 +272,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then break fi fi - done < "${ARCH_LIST}/${targrp}.txt" + done < "${DATA}/${targrp}.txt" ;; *) ;; @@ -284,7 +280,7 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then # Create the tarball tar_fl="${ATARDIR}/${PDY}${cyc}/${targrp}.tar" - ${TARCMD} -P -cvf "${tar_fl}" $(cat "${ARCH_LIST}/${targrp}.txt") + ${TARCMD} -P -cvf "${tar_fl}" $(cat "${DATA}/${targrp}.txt") status=$? # Change group to rstprod if it was found even if htar/tar failed in case of partial creation diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index f09c816e93..8d8ebf4fe9 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -27,30 +27,30 @@ if [[ ${type} = "gfs" ]]; then FHMAX_HF_GFS=${FHMAX_HF_GFS:-120} FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} - rm -f gfsa.txt - rm -f gfsb.txt - rm -f gfs_restarta.txt - touch gfsa.txt - touch gfsb.txt - touch gfs_restarta.txt + rm -f "${DATA}/gfsa.txt" + rm -f "${DATA}/gfsb.txt" + rm -f "${DATA}/gfs_restarta.txt" + touch "${DATA}/gfsa.txt" + touch "${DATA}/gfsb.txt" + touch "${DATA}/gfs_restarta.txt" if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then - rm -f gfs_pgrb2b.txt - rm -f gfs_netcdfb.txt - rm -f gfs_flux.txt - touch gfs_pgrb2b.txt - touch gfs_netcdfb.txt - touch gfs_flux.txt + rm -f "${DATA}/gfs_pgrb2b.txt" + rm -f "${DATA}/gfs_netcdfb.txt" + rm -f "${DATA}/gfs_flux.txt" + touch "${DATA}/gfs_pgrb2b.txt" + touch "${DATA}/gfs_netcdfb.txt" + touch "${DATA}/gfs_flux.txt" if [[ ${MODE} = "cycled" ]]; then - rm -f gfs_netcdfa.txt - touch gfs_netcdfa.txt + rm -f "${DATA}/gfs_netcdfa.txt" + touch "${DATA}/gfs_netcdfa.txt" fi fi if [[ ${DO_DOWN} = "YES" ]]; then - rm -f 
gfs_downstream.txt - touch gfs_downstream.txt + rm -f "${DATA}/gfs_downstream.txt" + touch "${DATA}/gfs_downstream.txt" fi head="gfs.t${cyc}z." @@ -61,7 +61,7 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" - } >> gfs_pgrb2b.txt + } >> "${DATA}/gfs_pgrb2b.txt" if [[ ${MODE} = "cycled" ]]; then { @@ -73,7 +73,7 @@ if [[ ${type} = "gfs" ]]; then for file in "${gsida_files[@]}"; do [[ -s ${COM_ATMOS_ANALYSIS}/${head}${file} ]] && echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}${file}" done - } >> gfs_netcdfa.txt + } >> "${DATA}/gfs_netcdfa.txt" fi fh=0 @@ -82,7 +82,7 @@ if [[ ${type} = "gfs" ]]; then { echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" - } >> gfs_netcdfb.txt + } >> "${DATA}/gfs_netcdfb.txt" fh=$((fh+ARCH_GAUSSIAN_FHINC)) done fi @@ -130,7 +130,7 @@ if [[ ${type} = "gfs" ]]; then for file in "${genesis_files[@]}"; do [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" done - } >> gfsa.txt + } >> "${DATA}/gfsa.txt" { if [[ ${DO_DOWN} = "YES" ]]; then @@ -142,14 +142,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" fi fi - } >> gfs_downstream.txt + } >> "${DATA}/gfs_downstream.txt" { echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" - } >> gfsb.txt + } >> "${DATA}/gfsb.txt" fh=0 @@ -159,7 +159,7 @@ if [[ ${type} = "gfs" ]]; then { echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" - } >> gfs_flux.txt 
+ } >> "${DATA}/gfs_flux.txt" { echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" @@ -168,14 +168,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" fi - } >> gfs_pgrb2b.txt + } >> "${DATA}/gfs_pgrb2b.txt" fi { echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" - } >> gfsa.txt + } >> "${DATA}/gfsa.txt" { @@ -187,7 +187,7 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" fi - } >> gfsb.txt + } >> "${DATA}/gfsb.txt" inc=${FHOUT_GFS} if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then @@ -221,14 +221,14 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" fi - } >> gfs_restarta.txt + } >> "${DATA}/gfs_restarta.txt" #.................. if [[ ${DO_WAVE} = "YES" ]]; then - rm -rf gfswave.txt - touch gfswave.txt + rm -rf "${DATA}/gfswave.txt" + touch "${DATA}/gfswave.txt" head="gfswave.t${cyc}z." @@ -237,60 +237,60 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> gfswave.txt + } >> "${DATA}/gfswave.txt" fi if [[ ${DO_OCN} = "YES" ]]; then head="gfs.t${cyc}z." 
- rm -f gfs_flux_1p00.txt - rm -f ocn_ice_grib2_0p5.txt - rm -f ocn_ice_grib2_0p25.txt - rm -f ocn_2D.txt - rm -f ocn_3D.txt - rm -f ocn_xsect.txt - rm -f ocn_daily.txt - touch gfs_flux_1p00.txt - touch ocn_ice_grib2_0p5.txt - touch ocn_ice_grib2_0p25.txt - touch ocn_2D.txt - touch ocn_3D.txt - touch ocn_xsect.txt - touch ocn_daily.txt - echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> ocn_2D.txt - echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> ocn_2D.txt - echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> ocn_3D.txt - echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> ocn_xsect.txt - echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> ocn_daily.txt - echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> ocn_ice_grib2_0p5.txt - echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> ocn_ice_grib2_0p25.txt + rm -f "${DATA}/gfs_flux_1p00.txt" + rm -f "${DATA}/ocn_ice_grib2_0p5.txt" + rm -f "${DATA}/ocn_ice_grib2_0p25.txt" + rm -f "${DATA}/ocn_2D.txt" + rm -f "${DATA}/ocn_3D.txt" + rm -f "${DATA}/ocn_xsect.txt" + rm -f "${DATA}/ocn_daily.txt" + touch "${DATA}/gfs_flux_1p00.txt" + touch "${DATA}/ocn_ice_grib2_0p5.txt" + touch "${DATA}/ocn_ice_grib2_0p25.txt" + touch "${DATA}/ocn_2D.txt" + touch "${DATA}/ocn_3D.txt" + touch "${DATA}/ocn_xsect.txt" + touch "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_2D/${ROTDIR}\//}/ocn_2D*" >> "${DATA}/ocn_2D.txt" + echo "${COM_OCEAN_3D/${ROTDIR}\//}/ocn_3D*" >> "${DATA}/ocn_3D.txt" + echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> "${DATA}/ocn_xsect.txt" + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_daily*" >> "${DATA}/ocn_daily.txt" + echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> "${DATA}/ocn_ice_grib2_0p5.txt" + echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> "${DATA}/ocn_ice_grib2_0p25.txt" # Also save fluxes from atmosphere { echo 
"${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" - } >> gfs_flux_1p00.txt + } >> "${DATA}/gfs_flux_1p00.txt" fi if [[ ${DO_ICE} = "YES" ]]; then head="gfs.t${cyc}z." - rm -f ice.txt - touch ice.txt + rm -f "${DATA}/ice.txt" + touch "${DATA}/ice.txt" { echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc" - } >> ice.txt + } >> "${DATA}/ice.txt" fi if [[ ${DO_AERO} = "YES" ]]; then head="gocart" - rm -f chem.txt - touch chem.txt + rm -f "${DATA}/chem.txt" + touch "${DATA}/chem.txt" - echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> chem.txt + echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> "${DATA}/chem.txt" fi #----------------------------------------------------- @@ -303,12 +303,12 @@ fi ##end of gfs if [[ ${type} == "gdas" ]]; then #----------------------------------------------------- - rm -f gdas.txt - rm -f gdas_restarta.txt - rm -f gdas_restartb.txt - touch gdas.txt - touch gdas_restarta.txt - touch gdas_restartb.txt + rm -f "${DATA}/gdas.txt" + rm -f "${DATA}/gdas_restarta.txt" + rm -f "${DATA}/gdas_restartb.txt" + touch "${DATA}/gdas.txt" + touch "${DATA}/gdas_restarta.txt" + touch "${DATA}/gdas_restartb.txt" head="gdas.t${cyc}z." @@ -379,14 +379,14 @@ if [[ ${type} == "gdas" ]]; then echo "${file}.idx" fi done - } >> gdas.txt + } >> "${DATA}/gdas.txt" #.................. 
if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> gdas_restarta.txt + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> "${DATA}/gdas_restarta.txt" fi if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then - echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> gdas_restarta.txt + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> "${DATA}/gdas_restarta.txt" fi { @@ -422,18 +422,18 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - } >> gdas_restarta.txt + } >> "${DATA}/gdas_restarta.txt" #.................. - echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> gdas_restartb.txt + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> "${DATA}/gdas_restartb.txt" #.................. if [[ ${DO_WAVE} = "YES" ]]; then - rm -rf gdaswave.txt - touch gdaswave.txt - rm -rf gdaswave_restart.txt - touch gdaswave_restart.txt + rm -rf "${DATA}/gdaswave.txt" + touch "${DATA}/gdaswave.txt" + rm -rf "${DATA}/gdaswave_restart.txt" + touch "${DATA}/gdaswave_restart.txt" head="gdaswave.t${cyc}z." @@ -441,21 +441,19 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" - } >> gdaswave.txt + } >> "${DATA}/gdaswave.txt" - echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> gdaswave_restart.txt + echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdaswave_restart.txt" fi #.................. 
if [[ ${DO_OCN} = "YES" ]]; then - rm -rf gdasocean.txt - touch gdasocean.txt - rm -rf gdasocean_restart.txt - touch gdasocean_restart.txt - rm -rf gdasocean_analysis.txt - touch gdasocean_analysis.txt + rm -rf "${DATA}/gdasocean.txt" + touch "${DATA}/gdasocean.txt" + rm -rf "${DATA}/gdasocean_restart.txt" + touch "${DATA}/gdasocean_restart.txt" head="gdas.t${cyc}z." @@ -463,28 +461,28 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" - } >> gdasocean.txt + } >> "${DATA}/gdasocean.txt" { echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" echo "${COM_MED_RESTART/${ROTDIR}\//}/*" - } >> gdasocean_restart.txt + } >> "${DATA}/gdasocean_restart.txt" { echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> gdasocean_analysis.txt + } >> "${DATA}/gdasocean_analysis.txt" fi if [[ ${DO_ICE} = "YES" ]]; then - rm -rf gdasice.txt - touch gdasice.txt - rm -rf gdasice_restart.txt - touch gdasice_restart.txt + rm -rf "${DATA}/gdasice.txt" + touch "${DATA}/gdasice.txt" + rm -rf "${DATA}/gdasice_restart.txt" + touch "${DATA}/gdasice_restart.txt" head="gdas.t${cyc}z." @@ -492,9 +490,9 @@ if [[ ${type} == "gdas" ]]; then { echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" - } >> gdasice.txt + } >> "${DATA}/gdasice.txt" - echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> gdasice_restart.txt + echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> "${DATA}/gdasice_restart.txt" fi @@ -522,8 +520,8 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then head="${RUN}.t${cyc}z." #.................. 
- rm -f "${RUN}.txt" - touch "${RUN}.txt" + rm -f "${DATA}/${RUN}.txt" + touch "${DATA}/${RUN}.txt" { gsida_files=("enkfstat" @@ -581,19 +579,19 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then fi fh=$((fh+3)) done - } >> "${RUN}.txt" + } >> "${DATA}/${RUN}.txt" #........................... n=1 while (( n <= NTARS )); do #........................... - rm -f "${RUN}_grp${n}.txt" - rm -f "${RUN}_restarta_grp${n}.txt" - rm -f "${RUN}_restartb_grp${n}.txt" - touch "${RUN}_grp${n}.txt" - touch "${RUN}_restarta_grp${n}.txt" - touch "${RUN}_restartb_grp${n}.txt" + rm -f "${DATA}/${RUN}_grp${n}.txt" + rm -f "${DATA}/${RUN}_restarta_grp${n}.txt" + rm -f "${DATA}/${RUN}_restartb_grp${n}.txt" + touch "${DATA}/${RUN}_grp${n}.txt" + touch "${DATA}/${RUN}_restarta_grp${n}.txt" + touch "${DATA}/${RUN}_restartb_grp${n}.txt" m=1 while (( m <= NMEM_EARCGRP )); do @@ -618,11 +616,11 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" fi fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ - >> "${RUN}_restarta_grp${n}.txt" + >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi else @@ -635,10 +633,10 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" fi fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ - >> "${RUN}_restarta_grp${n}.txt" + >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi fi { @@ -646,7 +644,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if (( FHR == 6 )); then echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" 
done # loop over FHR if [[ ${lobsdiag_forenkf} == ".false." ]] ; then @@ -655,7 +653,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" fi - } >> "${RUN}_grp${n}.txt" + } >> "${DATA}/${RUN}_grp${n}.txt" { if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then @@ -668,7 +666,7 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" - } >> "${RUN}_restarta_grp${n}.txt" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" fi #--- { @@ -678,9 +676,9 @@ if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" - } >> "${RUN}_restarta_grp${n}.txt" + } >> "${DATA}/${RUN}_restarta_grp${n}.txt" #--- - echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${RUN}_restartb_grp${n}.txt" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${DATA}/${RUN}_restartb_grp${n}.txt" m=$((m+1)) done From 241742b246da1794646b68e41c3d6fe00b95dd07 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:19:55 -0400 Subject: [PATCH 04/14] Updates to prep ocean obs task (#1870) --- jobs/JGLOBAL_PREP_OCEAN_OBS | 10 +++++++++- parm/config/gfs/config.prepoceanobs | 8 ++++++-- sorc/link_workflow.sh | 1 + 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS index a8fa86d4e5..d5064859d2 100755 --- a/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -7,15 +7,23 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e 
"prepoceanobs" -c "base prepoceanobs" # Set variables used in the script ############################################## +export COMIN_OBS="${DATA}" + ############################################## # Begin JOB SPECIFIC work ############################################## +# Add prep_marine_obs.py to PYTHONPATH +export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -# the relevant script goes here +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + ############################################## # End JOB SPECIFIC work diff --git a/parm/config/gfs/config.prepoceanobs b/parm/config/gfs/config.prepoceanobs index 051769b6ee..c5a9ddfd22 100644 --- a/parm/config/gfs/config.prepoceanobs +++ b/parm/config/gfs/config.prepoceanobs @@ -1,9 +1,13 @@ #!/bin/bash ########## config.prepoceanobs ########## -# Pre Ocn Analysis specific -echo "BEGIN: config.config.prepoceanobs" +echo "BEGIN: config.prepoceanobs" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST=@SOCA_OBS_LIST@ +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} # Get task specific resources . "${EXPDIR}/config.resources" prepoceanobs diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 0e2cdf5edd..d2328caba9 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -205,6 +205,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/jediinc2fv3.py" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/ioda/bufr2ioda/run_bufr2ioda.py" . ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/imsfv3_scf2ioda.py" . + ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/scripts/exglobal_prep_ocean_obs.py" . fi From 247cdf8c7e10e6c98f782799fc51c069b6474e70 Mon Sep 17 00:00:00 2001 From: "Henry R. 
Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 11:18:53 -0600 Subject: [PATCH 05/14] Corrects missing local variable reference (#2023) Fixes missing `$` in variable reference in archive job. Resolves #2022 --- ush/hpssarch_gen.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 8d8ebf4fe9..2caf0040ef 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -165,8 +165,8 @@ if [[ ${type} = "gfs" ]]; then echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" - echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.f${fhr}.idx" fi } >> "${DATA}/gfs_pgrb2b.txt" fi From 31b05a99b77d0f6eaa8b116485a41a43867d20e1 Mon Sep 17 00:00:00 2001 From: "Henry R. Winterbottom" <49202169+HenryWinterbottom-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 12:23:32 -0600 Subject: [PATCH 06/14] Move analysis post to new stand-alone task (#1979) This PR addresses issue #1130. A new task `postanl.sh` has been added such that the `anl` post-processing is not it's own task within the Rocoto workflow. This change prevents the GDAS experiment from breaking due to failures caused by the `anl` not being available for the first cycle. 
Resolves #1130 --- jobs/JGLOBAL_ATMOS_POST | 12 --------- jobs/rocoto/post.sh | 7 +---- jobs/rocoto/postanl.sh | 1 + workflow/applications/gfs_cycled.py | 2 +- workflow/hosts/awspw.yaml | 2 +- workflow/rocoto/gfs_tasks.py | 40 ++++++++++++++++++----------- 6 files changed, 29 insertions(+), 35 deletions(-) create mode 120000 jobs/rocoto/postanl.sh diff --git a/jobs/JGLOBAL_ATMOS_POST b/jobs/JGLOBAL_ATMOS_POST index a0cd8871e1..07890b9df5 100755 --- a/jobs/JGLOBAL_ATMOS_POST +++ b/jobs/JGLOBAL_ATMOS_POST @@ -17,18 +17,6 @@ export g2tmpl_ver=${g2tmpl_ver:-v1.5.0} ############################################## export CDUMP=${RUN/enkf} - -############################################## -# TODO: Remove this egregious HACK -############################################## -if [[ "${SDATE:-}" = "${PDY}${cyc}" ]]; then - if [[ ${post_times} = "anl" ]]; then - echo "No offline post-processing in the first half cycle for analysis" - exit 0 - fi -fi - - ############################################## # Begin JOB SPECIFIC work ############################################## diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh index e84b2b7b71..c91b9d4679 100755 --- a/jobs/rocoto/post.sh +++ b/jobs/rocoto/post.sh @@ -4,7 +4,6 @@ source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## NCEP post driver script -## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...) ## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...) ############################################################### @@ -16,11 +15,7 @@ status=$? 
export job="post" export jobid="${job}.$$" -if [ ${FHRGRP} = 'anl' ]; then - fhrlst="anl" -else - fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') -fi +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') #--------------------------------------------------------------- for fhr in ${fhrlst}; do diff --git a/jobs/rocoto/postanl.sh b/jobs/rocoto/postanl.sh new file mode 120000 index 0000000000..29e1fc721f --- /dev/null +++ b/jobs/rocoto/postanl.sh @@ -0,0 +1 @@ +post.sh \ No newline at end of file diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 6eff929d5f..6edd6bc0d8 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -101,7 +101,7 @@ def get_task_names(self): """ gdas_gfs_common_tasks_before_fcst = ['prep'] - gdas_gfs_common_tasks_after_fcst = ['post'] + gdas_gfs_common_tasks_after_fcst = ['postanl', 'post'] # if self.do_ocean: # TODO: uncomment when ocnpost is fixed in cycled mode # gdas_gfs_common_tasks_after_fcst += ['ocnpost'] gdas_gfs_common_tasks_after_fcst += ['vrfy'] diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index ee4999d4f8..becb38e236 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -21,4 +21,4 @@ LOCALARCH: 'NO' ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' # TODO: This will not yet work from AWS. MAKE_NSSTBUFR: 'NO' MAKE_ACFTBUFR: 'NO' -SUPPORTED_RESOLUTIONS: ['C48'] # TODO: Test and support all cubed-sphere resolutions. +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. 
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 55055a46ee..ca9af5af7d 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -549,24 +549,37 @@ def _fcst_cycled(self): return task def post(self): - add_anl_to_post = False - if self.app_config.mode in ['cycled']: - add_anl_to_post = True + return self._post_task('post') + + def postanl(self): + postenvars = self.envars.copy() + postenvar_dict = {'FHRLST': 'anl', + 'ROTDIR': self._base.get('ROTDIR')} + + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) - return self._post_task('post', add_anl_to_post=add_anl_to_post) + deps = [] + atm_anl_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_ANALYSIS_TMPL"]) + data = f'{atm_anl_path}/{self.cdump}.t@Hz.loganl.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + resources = self.get_resource('post') + task = create_wf_task('postanl', resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + cycledef=self.cdump) + + return task def ocnpost(self): if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost in cycled mode - return self._post_task('ocnpost', add_anl_to_post=False) + return self._post_task('ocnpost') - def _post_task(self, task_name, add_anl_to_post=False): + def _post_task(self, task_name): if task_name not in ['post', 'ocnpost']: raise KeyError(f'Invalid post-processing task: {task_name}') - if task_name in ['ocnpost']: - add_anl_to_post = False - - def _get_postgroups(cdump, config, add_anl=False): + def _get_postgroups(cdump, config): fhmin = config['FHMIN'] fhmax = config['FHMAX'] @@ -591,8 +604,6 @@ def _get_postgroups(cdump, config, add_anl=False): fhrs = [f'f{fhr:03d}' for fhr in fhrs] fhrs = np.array_split(fhrs, ngrps) fhrs = [fhr.tolist() for fhr in fhrs] - if add_anl: - fhrs.insert(0, ['anl']) grp = ' 
'.join(f'_{fhr[0]}-{fhr[-1]}' if len(fhr) > 1 else f'_{fhr[0]}' for fhr in fhrs) dep = ' '.join([fhr[-1] for fhr in fhrs]) @@ -610,14 +621,13 @@ def _get_postgroups(cdump, config, add_anl=False): dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) postenvars = self.envars.copy() - postenvar_dict = {'FHRGRP': '#grp#', - 'FHRLST': '#lst#', + postenvar_dict = {'FHRLST': '#lst#', 'ROTDIR': self._base.get('ROTDIR')} for key, value in postenvar_dict.items(): postenvars.append(rocoto.create_envar(name=key, value=str(value))) varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name], add_anl=add_anl_to_post) + varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name]) vardict = {varname2: varval2, varname3: varval3} cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump From 93bc918cdabee771c182c6f33cc54a20b5bb86c6 Mon Sep 17 00:00:00 2001 From: Kate Friedman Date: Thu, 2 Nov 2023 16:01:18 -0400 Subject: [PATCH 07/14] Create separate GSI monitor jobs and remove from vrfy job (#1983) This PR will move the GSI monitor tasks from the `vrfy` job into their own separate jobs in the rocoto mesh. This takes care of part of issue #235. Changes in this PR: 1. Create new individual GSI monitor jobs in the rocoto mesh to match how it is done in operations. The new jobs are: `gdasverfozn`, `gdasverfrad`, `gdasvminmon`, and `gfsvminmon`. The names correspond to the names in operations and the ecf script names. The jobs are only added to cycled mode, none of the jobs are run in forecast-only mode. 2. Updates to GSI monitor scripts: * Update `ush/jjob_header.sh` arguments in JJOB scripts to call new individual job names and configs. * Introduce `COM_*` variables and use of the `generate_com` utility. This results in the outputs landing in `COM` subfolders, like occurs in operations, instead of landing in the online archive. 
* Some variable cleanup in JJOB and ush scripts, includes cleaning up usage of `DATA` variable and its removal at the end of the jobs. * Add missing exit statements to `JGFS_ATMOS_VMINMON` and `JGDAS_ATMOS_VERFRAD`. 3. Create new rocoto job scripts for the new individual GSI monitor jobs. 4. Create new configs for the new individual GSI monitor jobs. 5. Add new individual GSI monitor jobs to resource configuration, using the same resources as in operations. 6. Move and rename GSI monitor job switches from `config.vrfy` to `config.base`. Also add switches to setup system. New switches to control each job being added to the rocoto mesh are: `DO_VERFOZN`, `DO_VERFRAD`, `DO_VMINMON` 7. Add GSI monitor job output to archival job and into resulting HPSS tarballs or local tarballs. This now matches what is done in operations. 8. Remove the GSI monitor tasks from the rocoto `vrfy` job and associated `config.vrfy`. Example output locations and files (not showing contents of oznmon/horiz or oznmon/time for brevity, see archival script for full list of output files): ``` kate.friedman@dlogin09:/lfs/h2/emc/ptmp/kate.friedman/comrot/testmonitor2> ll gdas.20211221/00/products/atmos/*mon gdas.20211221/00/products/atmos/minmon: total 112 -rw-r--r-- 1 kate.friedman emc 14030 Oct 25 15:00 2021122100.costs.txt -rw-r--r-- 1 kate.friedman emc 84932 Oct 25 15:00 2021122100.cost_terms.txt -rw-r--r-- 1 kate.friedman emc 808 Oct 25 15:00 2021122100.gnorms.ieee_d -rw-r--r-- 1 kate.friedman emc 808 Oct 25 15:00 2021122100.reduction.ieee_d -rw-r--r-- 1 kate.friedman emc 80 Oct 25 15:00 gnorm_data.txt gdas.20211221/00/products/atmos/oznmon: total 8 drwxr-sr-x 2 kate.friedman emc 4096 Oct 25 15:05 horiz drwxr-sr-x 2 kate.friedman emc 4096 Oct 25 15:05 time gdas.20211221/00/products/atmos/radmon: total 21036 -rw-r--r-- 1 kate.friedman emc 231 Oct 25 15:12 bad_diag.2021122100 -rw-r--r-- 1 kate.friedman emc 9035 Oct 25 15:12 bad_pen.2021122100 -rw-r--r-- 1 kate.friedman emc 1449 Oct 25 15:12 
low_count.2021122100 -rw-r--r-- 1 kate.friedman emc 20523403 Oct 25 15:07 radmon_angle.tar.gz -rw-r--r-- 1 kate.friedman emc 217272 Oct 25 15:08 radmon_bcoef.tar.gz -rw-r--r-- 1 kate.friedman emc 502151 Oct 25 15:10 radmon_bcor.tar.gz -rw-r--r-- 1 kate.friedman emc 264480 Oct 25 15:12 radmon_time.tar.gz -rw-r--r-- 1 kate.friedman emc 684 Oct 25 15:12 warning.2021122100 kate.friedman@dlogin09:/lfs/h2/emc/ptmp/kate.friedman/comrot/testmonitor2> ll gfs.20211221/00/products/atmos/minmon/ total 88 -rw-r--r-- 1 kate.friedman emc 10530 Oct 25 14:55 2021122100.costs.txt -rw-r--r-- 1 kate.friedman emc 63882 Oct 25 14:55 2021122100.cost_terms.txt -rw-r--r-- 1 kate.friedman emc 808 Oct 25 14:55 2021122100.gnorms.ieee_d -rw-r--r-- 1 kate.friedman emc 608 Oct 25 14:55 2021122100.reduction.ieee_d -rw-r--r-- 1 kate.friedman emc 80 Oct 25 14:55 gnorm_data.txt ``` Lingering work and considerations: 1. Issue #1925 2. Consider not hardcoding the list of types in archival and perhaps tie it to something reliable or configurable (maybe into `config.verfozn`?): `subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19"` Resolves #1908 --- jobs/JGDAS_ATMOS_VERFOZN | 19 +++------ jobs/JGDAS_ATMOS_VERFRAD | 26 +++++------- jobs/JGDAS_ATMOS_VMINMON | 19 +++++---- jobs/JGFS_ATMOS_VMINMON | 22 +++++------ jobs/JGLOBAL_ARCHIVE | 3 +- jobs/rocoto/verfozn.sh | 22 +++++++++++ jobs/rocoto/verfrad.sh | 22 +++++++++++ jobs/rocoto/vminmon.sh | 22 +++++++++++ jobs/rocoto/vrfy.sh | 50 ----------------------- parm/config/gfs/config.base.emc.dyn | 3 ++ parm/config/gfs/config.com | 2 + parm/config/gfs/config.resources | 31 ++++++++++++++- parm/config/gfs/config.verfozn | 9 +++++ parm/config/gfs/config.verfrad | 9 +++++ parm/config/gfs/config.vminmon | 9 +++++ parm/config/gfs/config.vrfy | 47 ---------------------- ush/hpssarch_gen.sh | 56 +++++++++++++++++++++++++- ush/minmon_xtrct_costs.pl | 2 +- ush/minmon_xtrct_gnorms.pl | 2 +- ush/minmon_xtrct_reduct.pl | 2 
+- workflow/applications/applications.py | 3 ++ workflow/applications/gfs_cycled.py | 21 ++++++++++ workflow/rocoto/gfs_tasks.py | 57 ++++++++++++++++++++++++++- workflow/rocoto/tasks.py | 3 +- 24 files changed, 305 insertions(+), 156 deletions(-) create mode 100755 jobs/rocoto/verfozn.sh create mode 100755 jobs/rocoto/verfrad.sh create mode 100755 jobs/rocoto/vminmon.sh create mode 100644 parm/config/gfs/config.verfozn create mode 100644 parm/config/gfs/config.verfrad create mode 100644 parm/config/gfs/config.vminmon diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN index deccc0b28e..3b75359e6a 100755 --- a/jobs/JGDAS_ATMOS_VERFOZN +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -4,7 +4,7 @@ # Set up environment for GDAS Ozone Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfozn" -c "base verfozn" export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} @@ -42,15 +42,12 @@ export p_cyc=${pdate:8:2} #--------------------------------------------- # OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE # -export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/atmos/oznmon} -export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/atmos/oznmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_OZNMON -if [[ ! -d ${TANKverf_ozn} ]]; then - mkdir -p -m 775 ${TANKverf_ozn} -fi +export TANKverf_ozn=${TANKverf_ozn:-${COM_ATMOS_OZNMON}} + +if [[ ! -d ${TANKverf_ozn} ]]; then mkdir -p -m 775 ${TANKverf_ozn} ; fi #--------------------------------------- # set up validation file @@ -77,10 +74,6 @@ err=$? 
################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD index 42e112c74f..301af7d06e 100755 --- a/jobs/JGDAS_ATMOS_VERFRAD +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -4,18 +4,13 @@ # Set up environment for GDAS Radiance Monitor job ############################################################# source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "verfrad" -c "base verfrad" export COMPONENT="atmos" -export RAD_DATA_IN=${DATA} - export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} -mkdir -p ${RAD_DATA_IN} -cd ${RAD_DATA_IN} - ############################################## # Specify Execution Areas ############################################## @@ -52,13 +47,15 @@ export p_cyc=${pdate:8:2} # COMOUT - WHERE GSI OUTPUT RESIDES # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} -export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/atmos/radmon} -export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/atmos/radmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RADMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_RADMON_PREV:COM_ATMOS_RADMON_TMPL -mkdir -p -m 775 ${TANKverf_rad} +export TANKverf_rad=${TANKverf_rad:-${COM_ATMOS_RADMON}} +export TANKverf_radM1=${TANKverf_radM1:-${COM_ATMOS_RADMON_PREV}} + +if [[ ! -d ${TANKverf_rad} ]]; then mkdir -p -m 775 ${TANKverf_rad} ; fi +if [[ ! 
-d ${TANKverf_radM1} ]]; then mkdir -p -m 775 ${TANKverf_radM1} ; fi ######################################## # Set necessary environment variables @@ -89,9 +86,6 @@ fi ################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-YES} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${RAD_DATA_IN} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" +exit 0 diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON index 3f9c0d856f..c7c6d12f9a 100755 --- a/jobs/JGDAS_ATMOS_VMINMON +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -4,7 +4,7 @@ # GDAS Minimization Monitor (MinMon) job ########################################################### source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" ########################################################### # obtain unique process id (pid) and make temp directories @@ -40,12 +40,15 @@ export p_cyc=${pdate:8:2} ############################################# # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# -export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/atmos/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/atmos/minmon} - YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL + +export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}} -mkdir -p -m 775 ${M_TANKverf} +if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 ${M_TANKverf} ; fi +if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 ${M_TANKverfM1} ; fi ######################################## @@ -65,10 +68,6 @@ err=$? 
################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" exit 0 diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON index a7300b4dd3..c548d806f5 100755 --- a/jobs/JGFS_ATMOS_VMINMON +++ b/jobs/JGFS_ATMOS_VMINMON @@ -4,7 +4,7 @@ # GFS Minimization Monitor (MinMon) job ########################################################### source "${HOMEgfs}/ush/preamble.sh" -source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vminmon" -c "base vminmon" ########################################################### # obtain unique process id (pid) and make temp directories @@ -39,12 +39,15 @@ export p_cyc=${pdate:8:2} # TANKverf - WHERE OUTPUT DATA WILL RESIDE ############################################# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS -YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_MINMON +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_MINMON_PREV:COM_ATMOS_MINMON_TMPL -M_TANKverf=${M_TANKverf:-${COM_ATMOS_ANALYSIS}/minmon} -export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_ANALYSIS_PREV}/minmon} +export M_TANKverf=${M_TANKverf:-${COM_ATMOS_MINMON}} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_MINMON_PREV}} + +if [[ ! -d ${M_TANKverf} ]]; then mkdir -p -m 775 ${M_TANKverf} ; fi +if [[ ! -d ${M_TANKverfM1} ]]; then mkdir -p -m 775 ${M_TANKverfM1} ; fi -mkdir -p -m 775 ${M_TANKverf} ######################################## # Set necessary environment variables @@ -63,11 +66,6 @@ err=$? 
################################ # Remove the Working Directory ################################ -KEEPDATA=${KEEPDATA:-NO} -cd ${DATAROOT} - -if [ ${KEEPDATA} = NO ] ; then - rm -rf ${DATA} -fi - +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" +exit 0 diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index f2828a9fdd..e6c016e703 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -18,7 +18,8 @@ YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMO COM_OBS COM_TOP \ COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_RESTART COM_OCEAN_XSECT COM_OCEAN_2D COM_OCEAN_3D \ COM_OCEAN_ANALYSIS \ - COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION \ + COM_ATMOS_OZNMON COM_ATMOS_RADMON COM_ATMOS_MINMON for grid in "0p25" "0p50" "1p00"; do YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" diff --git a/jobs/rocoto/verfozn.sh b/jobs/rocoto/verfozn.sh new file mode 100755 index 0000000000..70a772fca6 --- /dev/null +++ b/jobs/rocoto/verfozn.sh @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="verfozn" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" + +"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/verfrad.sh b/jobs/rocoto/verfrad.sh new file mode 100755 index 0000000000..a687d3cf26 --- /dev/null +++ b/jobs/rocoto/verfrad.sh @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="verfrad" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" + +"${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" +status=$? + +exit "${status}" diff --git a/jobs/rocoto/vminmon.sh b/jobs/rocoto/vminmon.sh new file mode 100755 index 0000000000..2bbb7599ca --- /dev/null +++ b/jobs/rocoto/vminmon.sh @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? +(( status != 0 )) && exit "${status}" + +export job="vminmon" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +echo +echo "=============== START TO RUN MINMON ===============" + +"${HOMEgfs}/jobs/J${RUN^^}_ATMOS_VMINMON" +status=$? 
+ +exit "${status}" diff --git a/jobs/rocoto/vrfy.sh b/jobs/rocoto/vrfy.sh index 7dc52351fc..67ac43e137 100755 --- a/jobs/rocoto/vrfy.sh +++ b/jobs/rocoto/vrfy.sh @@ -22,11 +22,6 @@ CDATEm1=$(${NDATE} -24 "${PDY}${cyc}") export CDATEm1 export PDYm1=${CDATEm1:0:8} -CDATEm1c=$(${NDATE} -06 "${PDY}${cyc}") -PDYm1c=${CDATEm1c:0:8} -pcyc=${CDATEm1c:8:2} - - ############################################################### # TODO: We can likely drop support for these dev-only grib1 precip files echo @@ -57,51 +52,6 @@ if [[ "${RUNMOS}" == "YES" && "${CDUMP}" == "gfs" ]]; then fi -############################################################### -echo -echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" - -if [[ "${VRFYRAD}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then - - export EXP=${PSLOT} - export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYRADSH} - -fi - - -############################################################### -echo -echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [[ "${VRFYOZN}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then - - export EXP=${PSLOT} - export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYOZNSH} - -fi - - -############################################################### -echo -echo "=============== START TO RUN MINMON ===============" -if [[ "${VRFYMINMON}" == "YES" && "${PDY}${cyc}" != "${SDATE}" && "${MODE}" = "cycled" ]]; then - - export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" - export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" - export MY_MACHINE=${machine} - - ${VRFYMINSH} - -fi - - 
################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn index b77787794c..8445f50400 100644 --- a/parm/config/gfs/config.base.emc.dyn +++ b/parm/config/gfs/config.base.emc.dyn @@ -57,6 +57,9 @@ export DO_BUFRSND="NO" # BUFR sounding products export DO_GEMPAK="NO" # GEMPAK products export DO_AWIPS="NO" # AWIPS products export DO_VRFY="YES" # VRFY step +export DO_VERFOZN="YES" # Ozone data assimilation monitoring +export DO_VERFRAD="YES" # Radiance data assimilation monitoring +export DO_VMINMON="YES" # GSI minimization monitoring # NO for retrospective parallel; YES for real-time parallel # arch.sh uses REALTIME for MOS. Need to set REALTIME=YES diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index 87a5b74b6f..208b0ac096 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -63,6 +63,8 @@ declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_v declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_OZNMON_TMPL=${COM_BASE}'/products/atmos/oznmon' +declare -rx COM_ATMOS_RADMON_TMPL=${COM_BASE}'/products/atmos/radmon' declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 1c21048c26..037b98803d 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -14,7 +14,8 @@ if [[ $# -ne 1 ]]; then echo "atmensanlinit atmensanlrun atmensanlfinal" echo "landanl" echo "aeroanlinit aeroanlrun aeroanlfinal" - echo "anal sfcanl analcalc analdiag 
fcst post vrfy fit2obs metp arch cleanup echgres" + echo "anal sfcanl analcalc analdiag fcst post echgres" + echo "verfozn verfrad vminmon vrfy fit2obs metp arch cleanup" echo "eobs ediag eomg eupd ecen esfc efcs epos earc" echo "init_chem mom6ic ocnpost" echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" @@ -714,6 +715,34 @@ elif [[ ${step} = "post" ]]; then if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi export is_exclusive=True +elif [[ ${step} = "verfozn" ]]; then + + export wtime_verfozn="00:05:00" + export npe_verfozn=1 + export nth_verfozn=1 + export npe_node_verfozn=1 + export memory_verfozn="1G" + +elif [[ ${step} = "verfrad" ]]; then + + export wtime_verfrad="00:20:00" + export npe_verfrad=1 + export nth_verfrad=1 + export npe_node_verfrad=1 + export memory_verfrad="5G" + +elif [[ ${step} = "vminmon" ]]; then + + export wtime_vminmon="00:05:00" + export npe_vminmon=1 + export nth_vminmon=1 + export npe_node_vminmon=1 + export wtime_vminmon_gfs="00:05:00" + export npe_vminmon_gfs=1 + export nth_vminmon_gfs=1 + export npe_node_vminmon_gfs=1 + export memory_vminmon="1G" + elif [[ ${step} = "vrfy" ]]; then export wtime_vrfy="03:00:00" diff --git a/parm/config/gfs/config.verfozn b/parm/config/gfs/config.verfozn new file mode 100644 index 0000000000..4091db3a11 --- /dev/null +++ b/parm/config/gfs/config.verfozn @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.verfozn ########## +echo "BEGIN: config.verfozn" + +# Get task specific resources +. "${EXPDIR}/config.resources" verfozn + +echo "END: config.verfozn" diff --git a/parm/config/gfs/config.verfrad b/parm/config/gfs/config.verfrad new file mode 100644 index 0000000000..e6dffcaa45 --- /dev/null +++ b/parm/config/gfs/config.verfrad @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.verfrad ########## +echo "BEGIN: config.verfrad" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" verfrad + +echo "END: config.verfrad" diff --git a/parm/config/gfs/config.vminmon b/parm/config/gfs/config.vminmon new file mode 100644 index 0000000000..d8888a7cb6 --- /dev/null +++ b/parm/config/gfs/config.vminmon @@ -0,0 +1,9 @@ +#! /usr/bin/env bash + +########## config.vminmon ########## +echo "BEGIN: config.vminmon" + +# Get task specific resources +. "${EXPDIR}/config.resources" vminmon + +echo "END: config.vminmon" diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy index 8754609c50..8b8c393ee1 100644 --- a/parm/config/gfs/config.vrfy +++ b/parm/config/gfs/config.vrfy @@ -10,58 +10,11 @@ echo "BEGIN: config.vrfy" export CDFNL="gdas" # Scores verification against GDAS/GFS analysis export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification -export VRFYRAD="YES" # Radiance data assimilation monitoring -export VRFYOZN="YES" # Ozone data assimilation monitoring -export VRFYMINMON="YES" # GSI minimization monitoring export VRFYTRAK="YES" # Hurricane track verification export VRFYGENESIS="YES" # Cyclone genesis verification export VRFYFSU="NO" # Cyclone genesis verification (FSU) export RUNMOS="NO" # whether to run entire MOS package -#---------------------------------------------------------- -# Minimization, Radiance and Ozone Monitoring -#---------------------------------------------------------- - -if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then - - export envir="para" - export COM_IN=${ROTDIR} - - # Radiance Monitoring - if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - - export RADMON_SUFFIX=${PSLOT} - export TANKverf="${NOSCRUB}/monitor/radmon" - export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" - - fi - - # Minimization Monitoring - if [[ "${VRFYMINMON}" = "YES" ]] ; then - - export MINMON_SUFFIX=${PSLOT} - export M_TANKverf="${NOSCRUB}/monitor/minmon" - if [[ "${RUN}" = "gdas" ]] ; then - export 
VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" - elif [[ "${RUN}" = "gfs" ]] ; then - export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" - fi - - fi - - # Ozone Monitoring - if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then - - export HOMEgfs_ozn="${HOMEgfs}" - export OZNMON_SUFFIX=${PSLOT} - export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" - export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" - - fi - -fi - - #------------------------------------------------- # Cyclone genesis and cyclone track verification #------------------------------------------------- diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 2caf0040ef..07f3c0c8bd 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -130,6 +130,17 @@ if [[ ${type} = "gfs" ]]; then for file in "${genesis_files[@]}"; do [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" done + + # GSI Monitor job output + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + } >> "${DATA}/gfsa.txt" { @@ -350,7 +361,7 @@ if [[ ${type} == "gdas" ]]; then if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" fi - for fstep in prep anal fcst vrfy radmon minmon oznmon; do + for fstep in prep anal fcst vrfy verfozn verfrad vminmon; do if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then echo "./logs/${PDY}${cyc}/gdas${fstep}.log" fi @@ -379,6 +390,49 @@ if [[ ${type} == "gdas" ]]; then echo "${file}.idx" fi done + + # GSI Monitor jobs output + + if [[ ${DO_VERFOZN} = "YES" ]]; then + for type in horiz time; do + if [[ ${type} = "horiz" ]]; then + suffix=".gz" + elif [[ 
${type} = "time" ]]; then + suffix="" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_cnt.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/bad_pen.${PDY}${cyc}" + fi + subtyplist="gome_metop-b omi_aura ompslp_npp ompsnp_n20 ompsnp_npp ompstc8_n20 ompstc8_npp sbuv2_n19" + for subtype in ${subtyplist}; do + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.${PDY}${cyc}.ieee_d${suffix}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.anl.ctl" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.${PDY}${cyc}.ieee_d${suffix}" + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/${subtype}.ges.ctl" + done + echo "${COM_ATMOS_OZNMON/${ROTDIR}\//}/${type}/stdout.${type}.tar.gz" + done + fi + + if [[ ${DO_VERFRAD} = "YES" ]]; then + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_diag.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/bad_pen.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/low_count.${PDY}${cyc}" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_angle.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcoef.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_bcor.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/radmon_time.tar.gz" + echo "${COM_ATMOS_RADMON/${ROTDIR}\//}/warning.${PDY}${cyc}" + fi + + if [[ ${DO_VMINMON} = "YES" ]]; then + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.costs.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.cost_terms.txt" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.gnorms.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/${PDY}${cyc}.reduction.ieee_d" + echo "${COM_ATMOS_MINMON/${ROTDIR}\//}/gnorm_data.txt" + fi + } >> "${DATA}/gdas.txt" #.................. 
diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl index 1b5d490102..502032da80 100755 --- a/ush/minmon_xtrct_costs.pl +++ b/ush/minmon_xtrct_costs.pl @@ -208,7 +208,7 @@ #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl index ecd44232da..0125c58ac8 100755 --- a/ush/minmon_xtrct_gnorms.pl +++ b/ush/minmon_xtrct_gnorms.pl @@ -414,7 +414,7 @@ sub updateGnormData { #-------------------------- # move files to $M_TANKverf #-------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! -d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl index f6037d3f32..1b8186b6ad 100755 --- a/ush/minmon_xtrct_reduct.pl +++ b/ush/minmon_xtrct_reduct.pl @@ -72,7 +72,7 @@ #---------------------------- # copy outfile to $M_TANKverf #---------------------------- - my $tankdir = $ENV{"M_TANKverfM0"}; + my $tankdir = $ENV{"M_TANKverf"}; if(! 
-d $tankdir) { system( "mkdir -p $tankdir" ); } diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index 19c2082dd3..138a5ef43f 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -55,6 +55,9 @@ def __init__(self, conf: Configuration) -> None: self.do_awips = _base.get('DO_AWIPS', False) self.do_wafs = _base.get('WAFSF', False) self.do_vrfy = _base.get('DO_VRFY', True) + self.do_verfozn = _base.get('DO_VERFOZN', True) + self.do_verfrad = _base.get('DO_VERFRAD', True) + self.do_vminmon = _base.get('DO_VMINMON', True) self.do_metp = _base.get('DO_METP', False) self.do_hpssarch = _base.get('HPSSARCH', False) diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 6edd6bc0d8..cdb5e18f3e 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -56,6 +56,15 @@ def _get_app_configs(self): configs += ['eobs', 'eomg', 'ediag', 'eupd'] configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] + if self.do_verfozn: + configs += ['verfozn'] + + if self.do_verfrad: + configs += ['verfrad'] + + if self.do_vminmon: + configs += ['vminmon'] + if self.do_metp: configs += ['metp'] @@ -159,6 +168,15 @@ def get_task_names(self): if self.do_fit2obs: gdas_tasks += ['fit2obs'] + if self.do_verfozn: + gdas_tasks += ['verfozn'] + + if self.do_verfrad: + gdas_tasks += ['verfrad'] + + if self.do_vminmon: + gdas_tasks += ['vminmon'] + gdas_tasks += gdas_gfs_common_cleanup_tasks # Collect "gfs" cycle tasks @@ -171,6 +189,9 @@ def get_task_names(self): gfs_tasks += gdas_gfs_common_tasks_after_fcst + if self.do_vminmon: + gfs_tasks += ['vminmon'] + if self.do_metp: gfs_tasks += ['metp'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index ca9af5af7d..5e2ed8cd03 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -864,6 +864,39 @@ def gempak(self): return task + def verfozn(self): 
+ deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfozn') + task = create_wf_task('verfozn', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def verfrad(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}analdiag'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('verfrad') + task = create_wf_task('verfrad', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def vminmon(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('vminmon') + task = create_wf_task('vminmon', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + def vrfy(self): deps = [] dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} @@ -918,6 +951,28 @@ def metp(self): def arch(self): deps = [] + dependencies = [] + if self.app_config.do_verfozn or self.app_config.do_verfrad or self.app_config.do_vminmon: + if self.app_config.mode in ['cycled']: + if self.cdump in ['gfs']: + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + deps.append(rocoto.add_dependency(dep_dict)) + elif self.cdump in ['gdas']: + deps2 = [] + if self.app_config.do_verfozn: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfozn'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_verfrad: + dep_dict = {'type': 'task', 'name': f'{self.cdump}verfrad'} + deps2.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_vminmon: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vminmon'} + 
deps2.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps2) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) if self.app_config.do_vrfy: dep_dict = {'type': 'task', 'name': f'{self.cdump}vrfy'} deps.append(rocoto.add_dependency(dep_dict)) @@ -941,7 +996,7 @@ def arch(self): dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps + dependencies) cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 29ed57daf2..b1dd1b0d92 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -19,7 +19,8 @@ class Tasks: 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', 'preplandobs', 'landanl', - 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', + 'fcst', 'post', 'ocnpost', + 'verfozn', 'verfrad', 'vminmon', 'vrfy', 'metp', 'postsnd', 'awips', 'gempak', 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep'] From bd4c56d99c23c5c41f0bd5f8a980ef99ee160888 Mon Sep 17 00:00:00 2001 From: AndrewEichmann-NOAA <58948505+AndrewEichmann-NOAA@users.noreply.github.com> Date: Thu, 2 Nov 2023 16:09:59 -0400 Subject: [PATCH 08/14] changed optional script name vars for ocean analysis (#2025) --- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP | 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN 
| 2 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY | 2 +- jobs/JGLOBAL_PREP_OCEAN_OBS | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT index a9366f7d80..a1ecc116ea 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT @@ -20,7 +20,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} +EXSCRIPT=${GDASOCNBMATSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY index 944181089c..08e7da60c0 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY @@ -19,7 +19,7 @@ export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} +EXSCRIPT=${GDASOCNMBATVRFYSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT index 6eab956ad9..afac9fbc25 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -34,7 +34,7 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +EXSCRIPT=${GDASOCNCHKPTSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST index ab86b66f20..6034fc5425 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -31,7 +31,7 @@ export PYTHONPATH # Run relevant script ############################################################### -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} +EXSCRIPT=${GDASOCNPOSTPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP index 3aecf1fb2e..2e49a9f14d 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -41,7 +41,7 @@ export PYTHONPATH ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} +EXSCRIPT=${GDASOCNPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN index 7c584b74d8..5871497223 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN @@ -17,7 +17,7 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalr ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} +EXSCRIPT=${GDASOCNRUNSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY index ec154af7f9..aba76d7d1a 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY @@ -32,7 +32,7 @@ export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} +EXSCRIPT=${GDASOCNVRFYPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} ${EXSCRIPT} status=$? [[ ${status} -ne 0 ]] && exit "${status}" diff --git a/jobs/JGLOBAL_PREP_OCEAN_OBS b/jobs/JGLOBAL_PREP_OCEAN_OBS index d5064859d2..44cbbf1c08 100755 --- a/jobs/JGLOBAL_PREP_OCEAN_OBS +++ b/jobs/JGLOBAL_PREP_OCEAN_OBS @@ -19,7 +19,7 @@ export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/soca:${PYTHONPATH} ############################################################### # Run relevant script -EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} +EXSCRIPT=${GDASPREPOCNOBSPY:-${HOMEgfs}/ush/exglobal_prep_ocean_obs.py} ${EXSCRIPT} status=$? 
[[ ${status} -ne 0 ]] && exit "${status}" From cbdc09711a83493f363a737d1e3e46c48a09719d Mon Sep 17 00:00:00 2001 From: Walter Kolczynski - NOAA Date: Fri, 3 Nov 2023 04:09:33 +0000 Subject: [PATCH 09/14] Fix staging of MOM data (#2028) During the update to stage_ic, the copying of the additional res_N files for 0p25 was omitted. These are now properly copied. Resolves #2027 --- scripts/exglobal_stage_ic.sh | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh index 43812adc89..53042c7e45 100755 --- a/scripts/exglobal_stage_ic.sh +++ b/scripts/exglobal_stage_ic.sh @@ -60,6 +60,26 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do rc=$? ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" err=$((err + rc)) + case "${OCNRES}" in + "500" | "100") + # Nothing more to do for these resolutions + ;; + "025" ) + for nn in $(seq 1 3); do + src="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + tgt="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + ${NCP} "${src}" "${tgt}" + rc=$? 
+ ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}" + err=$((err + rc)) + done + ;; + *) + echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" + rc=1 + err=$((err + rc)) + ;; + esac fi # Stage ice initial conditions to ROTDIR (warm start) if [[ "${DO_ICE:-}" = "YES" ]]; then From 2563806821d4ae3b57120bc61aa0575139a708f0 Mon Sep 17 00:00:00 2001 From: jiandewang Date: Fri, 3 Nov 2023 11:49:37 -0400 Subject: [PATCH 10/14] Add two ucx modules in load_ufswm_modules.sh to solve C768 and C1152 S2SW job hanging issue on WCOSS2 (#2021) --- ush/load_ufswm_modules.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh index 2d6e6a02c1..9fea63f402 100755 --- a/ush/load_ufswm_modules.sh +++ b/ush/load_ufswm_modules.sh @@ -19,6 +19,8 @@ if [[ "${MACHINE_ID}" != "noaacloud" ]]; then module load cray-pals module load cfp module load libjpeg + module load craype-network-ucx + module load cray-mpich-ucx else module load prod-util export UTILROOT=${prod_util_ROOT} From 8d55126bb0b27c79c8bee7f72b014abd34950ff9 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Fri, 3 Nov 2023 17:06:09 -0400 Subject: [PATCH 11/14] Implement IAU Cycling Type with Marine Data Assimilation Enabled (#1944) Allows cycling in IAU mode with the S2S UFS model, soca and the GSI. Most of the work related to making IAU work was done by @JessicaMeixner-NOAA . Thanks @JessicaMeixner-NOAA ! 
Resolves #1943 Refs #1776 --- parm/config/gfs/config.resources | 12 +++++++--- parm/ufs/fv3/diag_table_da | 18 +++++++-------- parm/ufs/mom6/MOM_input_template_025 | 4 ++-- parm/ufs/mom6/MOM_input_template_100 | 2 +- parm/ufs/mom6/MOM_input_template_500 | 9 ++++++++ ush/forecast_postdet.sh | 15 ++++++------ ush/forecast_predet.sh | 34 +++++++++++++++------------- 7 files changed, 55 insertions(+), 39 deletions(-) diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 037b98803d..1f89994b69 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -394,13 +394,14 @@ elif [[ "${step}" = "ocnanalrun" ]]; then case ${CASE} in C384) npes=480 - export memory_ocnanalchkpt="2.8TB" + memory_ocnanalrun="128GB" ;; C96) npes=16 ;; C48) npes=16 + memory_ocnanalrun="64GB" ;; *) echo "FATAL: Resolution not supported'" @@ -409,10 +410,11 @@ elif [[ "${step}" = "ocnanalrun" ]]; then export wtime_ocnanalrun="00:15:00" export npe_ocnanalrun=${npes} - export nth_ocnanalrun=1 + export nth_ocnanalrun=2 export is_exclusive=True npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) export npe_node_ocnanalrun + export memory_ocnanalrun elif [[ "${step}" = "ocnanalchkpt" ]]; then @@ -648,7 +650,11 @@ elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then declare -x "wtime_${step}"="00:30:00" declare -x "wtime_${step}_gfs"="03:00:00" ;; - "C384" | "C768" | "C1152") + "C384") + declare -x "wtime_${step}"="00:20:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + "C768" | "C1152") declare -x "wtime_${step}"="01:00:00" declare -x "wtime_${step}_gfs"="06:00:00" ;; diff --git a/parm/ufs/fv3/diag_table_da b/parm/ufs/fv3/diag_table_da index cdcc36ee57..40824caee9 100644 --- a/parm/ufs/fv3/diag_table_da +++ b/parm/ufs/fv3/diag_table_da @@ -2,15 +2,15 @@ "fv3_history2d", 0, "hours", 1, "hours", "time" "ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1, "hours" -"ocean_model", "geolon", "geolon", 
"ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 -"ocean_model", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model_z", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", .false., "none", 2 "gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/mom6/MOM_input_template_025 b/parm/ufs/mom6/MOM_input_template_025 index 604689c376..df56a3f486 100644 --- a/parm/ufs/mom6/MOM_input_template_025 +++ b/parm/ufs/mom6/MOM_input_template_025 @@ -341,8 +341,8 @@ DIAG_COORDS = "z Z ZSTAR" ! A list of string tuples associating diag_table modules to ! a coordinate definition used for diagnostics. Each string ! 
is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". -DIAG_COORD_DEF_Z="FILE:interpolate_zgrid_40L.nc,interfaces=zw" -DIAG_MISVAL = -1e34 +DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] !DIAG_COORD_DEF_RHO2 = "FILE:diag_rho2.nc,interfaces=rho2" ! default = "WOA09" ! Determines how to specify the coordinate resolution. Valid options are: ! PARAM - use the vector-parameter DIAG_COORD_RES_RHO2 diff --git a/parm/ufs/mom6/MOM_input_template_100 b/parm/ufs/mom6/MOM_input_template_100 index 5c671fe9d3..f26d6e4bfb 100644 --- a/parm/ufs/mom6/MOM_input_template_100 +++ b/parm/ufs/mom6/MOM_input_template_100 @@ -322,7 +322,7 @@ DIAG_COORDS = "z Z ZSTAR" ! a coordinate definition used for diagnostics. Each string ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" -DIAG_MISVAL = -1e34 +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] !AVAILABLE_DIAGS_FILE = "available_diags.002160" ! default = "available_diags.000000" ! A file into which to write a list of all available ocean diagnostics that can ! be included in a diag_table. diff --git a/parm/ufs/mom6/MOM_input_template_500 b/parm/ufs/mom6/MOM_input_template_500 index 1d75ba1e71..dde805d247 100644 --- a/parm/ufs/mom6/MOM_input_template_500 +++ b/parm/ufs/mom6/MOM_input_template_500 @@ -258,6 +258,15 @@ Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False ! If True, then remap straight to model coordinate from file. ! === module MOM_diag_mediator === +NUM_DIAG_COORDS = 1 + ! The number of diagnostic vertical coordinates to use. + ! For each coordinate, an entry in DIAG_COORDS must be provided. +DIAG_COORDS = "z Z ZSTAR" + ! A list of string tuples associating diag_table modules to + ! a coordinate definition used for diagnostics. Each string + ! is of the form "MODULE_SUFFIX,PARAMETER_SUFFIX,COORDINATE_NAME". 
+DIAG_COORD_DEF_Z="FILE:@[MOM6_DIAG_COORD_DEF_Z_FILE],interfaces=zw" +DIAG_MISVAL = @[MOM6_DIAG_MISVAL] ! === module MOM_MEKE === USE_MEKE = True ! [Boolean] default = False diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index e3166cd72f..bafa61dd0e 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -678,12 +678,12 @@ MOM6_postdet() { echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination" # Copy MOM6 ICs - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" case ${OCNRES} in "025") for nn in $(seq 1 4); do - if [[ -f "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then - ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" + if [[ -f "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" ]]; then + ${NLN} "${COM_OCEAN_RESTART_PREV}/${sPDY}.${scyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" fi done ;; @@ -904,7 +904,7 @@ CICE_postdet() { # Copy CICE ICs echo "Link CICE ICs" - cice_restart_file="${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" + cice_restart_file="${COM_ICE_RESTART_PREV}/${sPDY}.${scyc}0000.cice_model.res.nc" if [[ ! -f "${cice_restart_file}" ]]; then echo "FATAL ERROR: CICE restart file not found at '${cice_restart_file}', ABORT!" 
exit 112 @@ -1038,7 +1038,7 @@ GOCART_postdet() { rm -f "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" fi - #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end + #To Do: Temporarily removing this as this will crash gocart, adding copy statement at the end #${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" \ # "${DATA}/gocart.inst_aod.${vdate:0:8}_${vdate:8:2}00z.nc4" done @@ -1049,8 +1049,8 @@ GOCART_out() { # Copy gocart.inst_aod after the forecast is run (and successfull) # TO DO: this should be linked but there were issues where gocart was crashing if it was linked - local fhr - local vdate + local fhr + local vdate for fhr in ${FV3_OUTPUT_FH}; do if (( fhr == 0 )); then continue; fi vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H) @@ -1060,4 +1060,3 @@ GOCART_out() { } - diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index d6f5cc7cc3..ce0b50f818 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -71,6 +71,24 @@ common_predet(){ next_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${assim_freq} hours" +%Y%m%d%H) forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) + # IAU options + IAU_OFFSET=${IAU_OFFSET:-0} + DOIAU=${DOIAU:-"NO"} + if [[ "${DOIAU}" = "YES" ]]; then + sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H) + sPDY="${sCDATE:0:8}" + scyc="${sCDATE:8:2}" + tPDY=${previous_cycle:0:8} + tcyc=${previous_cycle:8:2} + else + sCDATE=${current_cycle} + sPDY=${current_cycle:0:8} + scyc=${current_cycle:8:2} + tPDY=${sPDY} + tcyc=${scyc} + fi + + cd "${DATA}" || ( echo "FATAL ERROR: Unable to 'cd ${DATA}', ABORT!"; exit 8 ) } @@ -121,10 +139,8 @@ FV3_predet(){ PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment # IAU options - DOIAU=${DOIAU:-"NO"} 
IAUFHRS=${IAUFHRS:-0} IAU_DELTHRS=${IAU_DELTHRS:-0} - IAU_OFFSET=${IAU_OFFSET:-0} # Model config options ntiles=6 @@ -216,20 +232,6 @@ FV3_predet(){ mkdir -p "${DATA}/RESTART" fi - if [[ "${DOIAU}" = "YES" ]]; then - sCDATE=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - 3 hours" +%Y%m%d%H) - sPDY="${sCDATE:0:8}" - scyc="${sCDATE:8:2}" - tPDY=${previous_cycle:0:8} - tcyc=${previous_cycle:8:2} - else - sCDATE=${current_cycle} - sPDY=${current_cycle:0:8} - scyc=${current_cycle:8:2} - tPDY=${sPDY} - tcyc=${scyc} - fi - echo "SUB ${FUNCNAME[0]}: pre-determination variables set" } From 75269e4bb9764b81589efc7d703825b80e74c8f5 Mon Sep 17 00:00:00 2001 From: TerrenceMcGuinness-NOAA Date: Mon, 6 Nov 2023 17:34:47 +0000 Subject: [PATCH 12/14] Streamline CI reporting to PRs (#2026) Cleaned up the logs for CI PRs sent to the GitHub Messages: - Eliminates redundant accumulative reporting - Reports on all created or skipped experiment instantiations in a single message - Gives one line report once for each experiment completion - Single final report completion of CI See some examples in forked [PR 195](https://github.com/TerrenceMcGuinness-NOAA/global-workflow/pull/195) Resolves #2007 --- ci/platforms/config.hera | 4 ++-- ci/platforms/config.orion | 4 ++-- ci/scripts/check_ci.sh | 37 ++++++++++++++++---------------- ci/scripts/clone-build_ci.sh | 23 +++++++++----------- ci/scripts/driver.sh | 40 ++++++++++++++++++++++------------- workflow/create_experiment.py | 6 +++--- 6 files changed, 61 insertions(+), 53 deletions(-) diff --git a/ci/platforms/config.hera b/ci/platforms/config.hera index c4c87bc197..76a6a08670 100644 --- a/ci/platforms/config.hera +++ b/ci/platforms/config.hera @@ -4,5 +4,5 @@ export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR export STMP="/scratch1/NCEPDEV/stmp2/${USER}" export SLURM_ACCOUNT=nems -export max_concurrent_cases=2 -export max_concurrent_pr=2 
+export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/platforms/config.orion b/ci/platforms/config.orion index 0cbbd5fe47..886a6e63b2 100644 --- a/ci/platforms/config.orion +++ b/ci/platforms/config.orion @@ -4,5 +4,5 @@ export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR export STMP="/work/noaa/stmp/${USER}" export SLURM_ACCOUNT=nems -export max_concurrent_cases=2 -export max_concurrent_pr=2 +export max_concurrent_cases=5 +export max_concurrent_pr=4 diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh index 097e20ced4..a5d7c77e66 100755 --- a/ci/scripts/check_ci.sh +++ b/ci/scripts/check_ci.sh @@ -70,6 +70,8 @@ fi for pr in ${pr_list}; do id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + output_ci="${GFS_CI_ROOT}/PR/${pr}/output_runtime_${id}" + output_ci_single="${GFS_CI_ROOT}/PR/${pr}/output_runtime_single.log" echo "Processing Pull Request #${pr} and looking for cases" pr_dir="${GFS_CI_ROOT}/PR/${pr}" @@ -83,8 +85,9 @@ for pr in ${pr_list}; do # shellcheck disable=SC2312 if [[ -z $(ls -A "${pr_dir}/RUNTESTS/EXPDIR") ]] ; then "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + sed -i "1 i\`\`\`" "${output_ci}" + sed -i "1 i\All CI Test Cases Passed:" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" # Check to see if this PR that was opened by the weekly tests and if so close it if it passed on all platforms weekly_labels=$(${GH} pr view "${pr}" --repo "${REPO_URL}" --json headRefName,labels,author --jq 'select(.author.login | contains("emcbot")) | select(.headRefName | 
contains("weekly_ci")) | .labels[].name ') || true @@ -121,22 +124,20 @@ for pr in ${pr_list}; do rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true - num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + # num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true if [[ ${num_failed} -ne 0 ]]; then - { - echo "Experiment ${pslot} Terminated: *FAILED*" - echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true { + echo "Experiment ${pslot} Terminated: *** FAILED ***" + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true echo "Error logs:" echo "${error_logs}" - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + } >> "${output_ci}" + sed -i "1 i\`\`\`" "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr 
"${pr}" --dbfile "${pr_list_dbfile}" for kill_cases in "${pr_dir}/RUNTESTS/"*; do pslot=$(basename "${kill_cases}") @@ -145,16 +146,16 @@ for pr in ${pr_list}; do break fi if [[ "${num_done}" -eq "${num_cycles}" ]]; then - { - echo "Experiment ${pslot} completed: *SUCCESS*" - echo "Experiment ${pslot} Completed at $(date)" || true - echo "with ${num_succeeded} successfully completed jobs" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" #Remove Experment cases that completed successfully rm -Rf "${pslot_dir}" rm -Rf "${pr_dir}/RUNTESTS/COMROT/${pslot}" + rm -f "${output_ci_single}" + # echo "\`\`\`" > "${output_ci_single}" + DATE=$(date) + echo "Experiment ${pslot} **SUCCESS** ${DATE}" >> "${output_ci_single}" + echo "Experiment ${pslot} **SUCCESS** at ${DATE}" >> "${output_ci}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}" + fi done done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 03eff13158..4b77d38ab8 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -40,7 +40,6 @@ while getopts "p:d:o:h" opt; do done cd "${repodir}" || exit 1 -# clone copy of repo if [[ -d global-workflow ]]; then rm -Rf global-workflow fi @@ -48,13 +47,6 @@ fi git clone "${REPO_URL}" cd global-workflow || exit 1 -pr_state=$("${GH}" pr view "${PR}" --json state --jq '.state') -if [[ "${pr_state}" != "OPEN" ]]; then - title=$("${GH}" pr view "${PR}" --json title --jq '.title') - echo "PR ${title} is no longer open, state is ${pr_state} ... 
quitting" - exit 1 -fi - # checkout pull request "${GH}" pr checkout "${PR}" --repo "${REPO_URL}" HOMEgfs="${PWD}" @@ -78,19 +70,17 @@ echo "${commit}" > "../commit" # run checkout script cd sorc || exit 1 set +e -# TODO enable -u later when GDASApp tests are added ./checkout.sh -c -g -u >> log.checkout 2>&1 checkout_status=$? if [[ ${checkout_status} != 0 ]]; then { - echo "Checkout: *FAILED*" + echo "Checkout: *** FAILED ***" echo "Checkout: Failed at $(date)" || true echo "Checkout: see output at ${PWD}/log.checkout" } >> "${outfile}" exit "${checkout_status}" else { - echo "Checkout: *SUCCESS*" echo "Checkout: Completed at $(date)" || true } >> "${outfile}" fi @@ -104,19 +94,26 @@ build_status=$? if [[ ${build_status} != 0 ]]; then { - echo "Build: *FAILED*" + echo "Build: *** FAILED ***" echo "Build: Failed at $(date)" || true echo "Build: see output at ${PWD}/log.build" } >> "${outfile}" exit "${build_status}" else { - echo "Build: *SUCCESS*" echo "Build: Completed at $(date)" || true } >> "${outfile}" fi ./link_workflow.sh +link_status=$? +if [[ ${link_status} != 0 ]]; then + { + echo "Link: *** FAILED ***" + echo "Link: Failed at $(date)" || true + } >> "${outfile}" + exit "${link_status}" +fi echo "check/build/link test completed" exit "${build_status}" diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh index 00143fa049..7988ff17a1 100755 --- a/ci/scripts/driver.sh +++ b/ci/scripts/driver.sh @@ -119,7 +119,10 @@ for pr in ${pr_list}; do # call clone-build_ci to clone and build PR id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') set +e - "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + output_ci="${pr_dir}/output_build_${id}" + rm -f "${output_ci}" + "${ROOT_DIR}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${output_ci}" + #echo "SKIPPING: ${ROOT_DIR}/ci/scripts/clone-build_ci.sh" ci_status=$? 
################################################################## # Checking for special case when Ready label was updated @@ -138,7 +141,7 @@ for pr in ${pr_list}; do #setup space to put an experiment # export RUNTESTS for yaml case files to pickup export RUNTESTS="${pr_dir}/RUNTESTS" - #rm -Rf "${pr_dir:?}/RUNTESTS/"* + rm -Rf "${pr_dir:?}/RUNTESTS/"* ############################################################# # loop over every yaml file in the PR's ci/cases @@ -155,39 +158,46 @@ for pr in ${pr_list}; do rm -Rf "${STMP}/RUNDIRS/${pslot}" set +e export LOGFILE_PATH="${HOMEgfs}/ci/scripts/create_experiment.log" - "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" + rm -f "${LOGFILE_PATH}" + "${HOMEgfs}/workflow/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/pr/${case}.yaml" 2>&1 "${LOGFILE_PATH}" ci_status=$? set -e if [[ ${ci_status} -eq 0 ]]; then + last_line=$(tail -1 "${LOGFILE_PATH}") + if [[ "${last_line}" == *"Skipping creation"* ]]; then + action="Skipped" + else + action="Completed" + fi { - echo "Created experiment: *SUCCESS*" - echo "Case setup: Completed at $(date) for experiment ${pslot}" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running" - "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running + echo "Case setup: ${action} for experiment ${pslot}" || true + } >> "${output_ci}" else { - echo "Failed to create experiment: *FAIL* ${pslot}" - echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true + echo "*** Failed *** to create experiment: ${pslot}" echo "" cat "${LOGFILE_PATH}" - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + } >> "${output_ci}" "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" 
"${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + exit 1 fi done + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running" + "${ROOT_DIR}/ci/scripts/pr_list_database.py" --dbfile "${pr_list_dbfile}" --update_pr "${pr}" Open Running + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" + else { - echo '```' echo "Failed on cloning and building global-workflowi PR: ${pr}" echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true - } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + } >> "${output_ci}" "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed" "${ROOT_DIR}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" --dbfile "${pr_list_dbfile}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci}" fi - sed -i "s/\`\`\`//2g" "${GFS_CI_ROOT}/PR/${pr}/output_${id}" - "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" done # looping over each open and labeled PR diff --git a/workflow/create_experiment.py b/workflow/create_experiment.py index bfc87672f4..cfa49e0d38 100755 --- a/workflow/create_experiment.py +++ b/workflow/create_experiment.py @@ -34,7 +34,7 @@ _top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) # Setup the logger -logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) +logger = Logger(logfile_path=os.environ.get("LOGFILE_PATH"), level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=False) @logit(logger) @@ -77,9 +77,9 @@ def input_args(): data.update(os.environ) testconf = parse_j2yaml(path=user_inputs.yaml, data=data) - if 'exclude' in testconf: + if 'skip_ci_on_hosts' in testconf: 
host = Host() - if host.machine.lower() in [excluded_host.lower() for excluded_host in testconf.exclude]: + if host.machine.lower() in testconf.skip_ci_on_hosts.lower(): logger.info(f'Skipping creation of case: {testconf.arguments.pslot} on {host.machine.capitalize()}') sys.exit(0) From 34a73cf2ed5afefc776d9cc73b6ebf439122a1bb Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Mon, 6 Nov 2023 14:00:51 -0500 Subject: [PATCH 13/14] Reduce gdas builds to 8 #2029 (#2036) Reduce the default number of build jobs for the GDASApp to 8 from 16. This is needed for Orion as the build crashes during a linking step. Though not verified, it appears this may be caused by using too much memory with 16 builds. The issue disappears when using 8 build jobs. Resolves #2029 --- sorc/build_all.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 95183f9065..4ba0b92888 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -141,7 +141,7 @@ fi # Optional DA builds if [[ -d gdas.cd ]]; then - build_jobs["gdas"]=16 + build_jobs["gdas"]=8 big_jobs=$((big_jobs+1)) build_opts["gdas"]="${_verbose_opt}" fi From 5183c43bbbd07285869feacb49d8680afe85929a Mon Sep 17 00:00:00 2001 From: Barry Baker Date: Tue, 7 Nov 2023 01:14:40 -0500 Subject: [PATCH 14/14] Update UFS for dust fix and remove nitrates by default (#1989) This PR updates the `ufs-weather-model` to the latest hash that included the dust fix (PR #1922). Along with this I removed the nitrates by default in support of the GEFSv13 EP4 and EP5 experiments. Removed unneeded inputs that should help with speed improvements. 
--- Externals.cfg | 2 +- parm/ufs/chem/AERO_HISTORY.rc | 413 ++++++++++++++--------------- parm/ufs/chem/CAP.rc | 11 +- parm/ufs/chem/DU2G_instance_DU.rc | 4 +- parm/ufs/chem/ExtData.gbbepx | 8 +- parm/ufs/chem/ExtData.other | 39 ++- parm/ufs/chem/ExtData.qfed | 8 +- parm/ufs/chem/GOCART2G_GridComp.rc | 2 +- parm/ufs/chem/gocart_tracer.list | 5 - parm/ufs/fv3/diag_table.aero | 10 +- parm/ufs/fv3/field_table.aero | 50 ++-- sorc/checkout.sh | 2 +- 12 files changed, 274 insertions(+), 280 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index abe5f30aaf..e5d7c0d8c9 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -8,7 +8,7 @@ protocol = git required = True [UFS] -tag = 68050e5 +tag = 63a43d9 local_path = sorc/ufs_model.fd repo_url = https://github.com/ufs-community/ufs-weather-model.git protocol = git diff --git a/parm/ufs/chem/AERO_HISTORY.rc b/parm/ufs/chem/AERO_HISTORY.rc index 19f572bb8c..4c7df15b2a 100644 --- a/parm/ufs/chem/AERO_HISTORY.rc +++ b/parm/ufs/chem/AERO_HISTORY.rc @@ -6,7 +6,7 @@ VERSION: 1 EXPID: gocart EXPDSC: GOCART2g_diagnostics_at_c360 EXPSRC: GEOSgcm-v10.16.0 - +Allow_Overwrite: .true. 
COLLECTIONS: 'inst_aod' # 'inst_du_ss' @@ -25,7 +25,7 @@ COLLECTIONS: 'inst_aod' # 'tavg_du_bin' # 'tavg_2d_rad' # 'tavg_3d_rad' - :: + :: ################################################## # The GRID_LABELS section must be after the main # @@ -63,8 +63,8 @@ PC720x361-DC.LM: 72 inst_du_ss.ref_time: 000000 , inst_du_ss.nbits: 10, inst_du_ss.fields: 'DU' , 'DU' , - 'SS' , 'SS' , - :: + 'SS' , 'SS' , + :: tavg_du_ss.format: 'CFIO' , tavg_du_ss.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -77,8 +77,8 @@ PC720x361-DC.LM: 72 tavg_du_ss.ref_time: 000000 , tavg_du_ss.nbits: 10, tavg_du_ss.fields: 'DU' , 'DU' , - 'SS' , 'SS' , - :: + 'SS' , 'SS' , + :: inst_ca.format: 'CFIO' , inst_ca.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -90,10 +90,10 @@ PC720x361-DC.LM: 72 inst_ca.ref_time: 000000 , inst_ca.nbits: 10, inst_ca.fields: 'CAphilicCA.bc' , 'CA.bc' , - 'CAphobicCA.bc' , 'CA.bc' , - 'CAphilicCA.oc' , 'CA.oc' , - 'CAphobicCA.oc' , 'CA.oc' , - :: + 'CAphobicCA.bc' , 'CA.bc' , + 'CAphilicCA.oc' , 'CA.oc' , + 'CAphobicCA.oc' , 'CA.oc' , + :: inst_ni.format: 'CFIO' , inst_ni.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -105,11 +105,11 @@ PC720x361-DC.LM: 72 inst_ni.ref_time: 000000 , inst_ni.nbits: 10, inst_ni.fields: 'NH3' , 'NI' , - 'NH4a' , 'NI' , - 'NO3an1' , 'NI' , - 'NO3an2' , 'NI' , - 'NO3an3' , 'NI' , - :: + 'NH4a' , 'NI' , + 'NO3an1' , 'NI' , + 'NO3an2' , 'NI' , + 'NO3an3' , 'NI' , + :: inst_su.format: 'CFIO' , inst_su.descr: '3d,Hourly,Instantaneous,Model-Level,Aerosol Concentrations', @@ -121,10 +121,10 @@ PC720x361-DC.LM: 72 inst_su.ref_time: 000000 , inst_su.nbits: 10, inst_su.fields: 'DMS' , 'SU' , - 'SO2' , 'SU' , - 'SO4' , 'SU' , - 'MSA' , 'SU' , - :: + 'SO2' , 'SU' , + 'SO4' , 'SU' , + 'MSA' , 'SU' , + :: # # Binned aerosols # @@ -140,11 +140,11 @@ PC720x361-DC.LM: 72 inst_du_bin.ref_time: 000000 , inst_du_bin.nbits: 10, inst_du_bin.fields: 'DUEM' , 'DU' , - 'DUSD' , 'DU' , - 
'DUDP' , 'DU' , - 'DUWT' , 'DU' , - 'DUSV' , 'DU' , - :: + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: tavg_du_bin.format: 'CFIO' , tavg_du_bin.descr: '2d,Hourly,Instantaneous' @@ -157,11 +157,11 @@ PC720x361-DC.LM: 72 tavg_du_bin.ref_time: 000000 , tavg_du_bin.nbits: 10, tavg_du_bin.fields: 'DUEM' , 'DU' , - 'DUSD' , 'DU' , - 'DUDP' , 'DU' , - 'DUWT' , 'DU' , - 'DUSV' , 'DU' , - :: + 'DUSD' , 'DU' , + 'DUDP' , 'DU' , + 'DUWT' , 'DU' , + 'DUSV' , 'DU' , + :: inst_ss_bin.format: 'CFIO' , inst_ss_bin.descr: '2d,Hourly,Instantaneous' @@ -174,11 +174,11 @@ PC720x361-DC.LM: 72 inst_ss_bin.ref_time: 000000 , inst_ss_bin.nbits: 10, inst_ss_bin.fields: 'SSEM' , 'SS' , - 'SSSD' , 'SS' , - 'SSDP' , 'SS' , - 'SSWT' , 'SS' , - 'SSSV' , 'SS' , - :: + 'SSSD' , 'SS' , + 'SSDP' , 'SS' , + 'SSWT' , 'SS' , + 'SSSV' , 'SS' , + :: inst_ca_bin.format: 'CFIO' , inst_ca_bin.descr: '3d,Hourly,Instantaneous,Model-Level' @@ -191,16 +191,16 @@ PC720x361-DC.LM: 72 inst_ca_bin.ref_time: 000000 , inst_ca_bin.nbits: 10, inst_ca_bin.fields: 'CAEMCA.bc' , 'CA.bc' , - 'CAEMCA.oc' , 'CA.oc' , - 'CASDCA.bc' , 'CA.bc' , - 'CASDCA.oc' , 'CA.oc' , - 'CADPCA.bc' , 'CA.bc' , - 'CADPCA.oc' , 'CA.oc' , - 'CAWTCA.bc' , 'CA.bc' , - 'CAWTCA.oc' , 'CA.oc' , - 'CASVCA.bc' , 'CA.bc' , - 'CASVCA.oc' , 'CA.oc' , - :: + 'CAEMCA.oc' , 'CA.oc' , + 'CASDCA.bc' , 'CA.bc' , + 'CASDCA.oc' , 'CA.oc' , + 'CADPCA.bc' , 'CA.bc' , + 'CADPCA.oc' , 'CA.oc' , + 'CAWTCA.bc' , 'CA.bc' , + 'CAWTCA.oc' , 'CA.oc' , + 'CASVCA.bc' , 'CA.bc' , + 'CASVCA.oc' , 'CA.oc' , + :: inst_ni_bin.format: 'CFIO' , inst_ni_bin.descr: '3d,Hourly,Instantaneous,Model-Level' @@ -213,11 +213,11 @@ PC720x361-DC.LM: 72 inst_ni_bin.ref_time: 000000 , inst_ni_bin.nbits: 10, inst_ni_bin.fields: 'NIHT' , 'NI' , - 'NISD' , 'NI' , - 'NIDP' , 'NI' , - 'NIWT' , 'NI' , - 'NISV' , 'NI' , - :: + 'NISD' , 'NI' , + 'NIDP' , 'NI' , + 'NIWT' , 'NI' , + 'NISV' , 'NI' , + :: inst_su_bin.format: 'CFIO' , inst_su_bin.descr: 
'3d,Hourly,Instantaneous,Model-Level' @@ -230,11 +230,11 @@ PC720x361-DC.LM: 72 inst_su_bin.ref_time: 000000 , inst_su_bin.nbits: 10, inst_su_bin.fields: 'SUEM' , 'SU', - 'SUSD' , 'SU', - 'SUDP' , 'SU', - 'SUWT' , 'SU', - 'SUSV' , 'SU', - :: + 'SUSD' , 'SU', + 'SUDP' , 'SU', + 'SUWT' , 'SU', + 'SUSV' , 'SU', + :: # # Other 2d diagnostics @@ -249,92 +249,92 @@ PC720x361-DC.LM: 72 inst_2d.ref_time: 000000, inst_2d.grid_label: PC720x361-DC inst_2d.fields: 'DUSMASS' , 'DU' , - 'DUCMASS' , 'DU' , - 'DUSMASS25' , 'DU' , - 'DUCMASS25' , 'DU' , - 'DUAERIDX' , 'DU' , - 'DUFLUXU' , 'DU' , - 'DUFLUXV' , 'DU' , - 'DUANGSTR' , 'DU' , - 'SSSMASS' , 'SS' , - 'SSCMASS' , 'SS' , - 'SSSMASS25' , 'SS' , - 'SSCMASS25' , 'SS' , - 'SSAERIDX' , 'SS' , - 'SSANGSTR' , 'SS' , - 'SSFLUXU' , 'SS' , - 'SSFLUXV' , 'SS' , - 'CAEMANCA.bc' , 'CA.bc' , - 'CAEMANCA.oc' , 'CA.oc' , - 'CAEMBBCA.bc' , 'CA.bc' , - 'CAEMBBCA.oc' , 'CA.oc' , - 'CAEMBFCA.bc' , 'CA.bc' , - 'CAEMBFCA.oc' , 'CA.oc' , - 'CAEMBGCA.bc' , 'CA.bc' , - 'CAEMBGCA.oc' , 'CA.oc' , - 'CAHYPHILCA.bc' , 'CA.bc' , - 'CAHYPHILCA.oc' , 'CA.oc' , - 'CAPSOACA.bc' , 'CA.bc' , - 'CAPSOACA.oc' , 'CA.oc' , - 'CASMASSCA.bc' , 'CA.bc' , - 'CASMASSCA.oc' , 'CA.oc' , - 'CACMASSCA.bc' , 'CA.bc' , - 'CACMASSCA.oc' , 'CA.oc' , - 'CAANGSTRCA.bc' , 'CA.bc' , - 'CAANGSTRCA.oc' , 'CA.oc' , - 'CAFLUXUCA.bc' , 'CA.bc' , - 'CAFLUXUCA.oc' , 'CA.oc' , - 'CAFLUXVCA.bc' , 'CA.bc' , - 'CAFLUXVCA.oc' , 'CA.oc' , - 'CAAERIDXCA.bc' , 'CA.bc' , - 'CAAERIDXCA.oc' , 'CA.oc' , - 'NIPNO3AQ' , 'NI' , - 'NIPNH4AQ' , 'NI' , - 'NIPNH3AQ' , 'NI' , - 'NH3EM' , 'NI' , - 'NH3DP' , 'NI' , - 'NH3WT' , 'NI' , - 'NH3SV' , 'NI' , - 'NH4SD' , 'NI' , - 'NH4DP' , 'NI' , - 'NH4WT' , 'NI' , - 'NH4SV' , 'NI' , - 'HNO3SMASS' , 'NI' , - 'NH3SMASS' , 'NI' , - 'NH4SMASS' , 'NI' , - 'NISMASS' , 'NI' , - 'NISMASS25' , 'NI' , - 'HNO3CMASS' , 'NI' , - 'NH3CMASS' , 'NI' , - 'NH4CMASS' , 'NI' , - 'NICMASS' , 'NI' , - 'NICMASS25' , 'NI' , - 'NIANGSTR' , 'NI' , - 'NIFLUXU' , 'NI' , - 'NIFLUXV' , 'NI' , 
- 'SUPSO2' , 'SU' , - 'SUPSO4' , 'SU' , - 'SUPSO4G' , 'SU' , - 'SUPSO4AQ' , 'SU' , - 'SUPSO4WT' , 'SU' , - 'SUPMSA' , 'SU' , - 'SO2SMASS' , 'SU' , - 'SO2CMASS' , 'SU' , - 'SO4SMASS' , 'SU' , - 'SO4CMASS' , 'SU' , - 'DMSSMASS' , 'SU' , - 'DMSCMASS' , 'SU' , - 'MSASMASS' , 'SU' , - 'MSACMASS' , 'SU' , - 'SUANGSTR' , 'SU' , - 'SUFLUXU' , 'SU' , - 'SUFLUXV' , 'SU' , - 'SO4EMAN' , 'SU' , - 'SO2EMAN' , 'SU' , - 'SO2EMBB' , 'SU' , - 'SO2EMVN' , 'SU' , - 'SO2EMVE' , 'SU' , - :: + 'DUCMASS' , 'DU' , + 'DUSMASS25' , 'DU' , + 'DUCMASS25' , 'DU' , + 'DUAERIDX' , 'DU' , + 'DUFLUXU' , 'DU' , + 'DUFLUXV' , 'DU' , + 'DUANGSTR' , 'DU' , + 'SSSMASS' , 'SS' , + 'SSCMASS' , 'SS' , + 'SSSMASS25' , 'SS' , + 'SSCMASS25' , 'SS' , + 'SSAERIDX' , 'SS' , + 'SSANGSTR' , 'SS' , + 'SSFLUXU' , 'SS' , + 'SSFLUXV' , 'SS' , + 'CAEMANCA.bc' , 'CA.bc' , + 'CAEMANCA.oc' , 'CA.oc' , + 'CAEMBBCA.bc' , 'CA.bc' , + 'CAEMBBCA.oc' , 'CA.oc' , + 'CAEMBFCA.bc' , 'CA.bc' , + 'CAEMBFCA.oc' , 'CA.oc' , + 'CAEMBGCA.bc' , 'CA.bc' , + 'CAEMBGCA.oc' , 'CA.oc' , + 'CAHYPHILCA.bc' , 'CA.bc' , + 'CAHYPHILCA.oc' , 'CA.oc' , + 'CAPSOACA.bc' , 'CA.bc' , + 'CAPSOACA.oc' , 'CA.oc' , + 'CASMASSCA.bc' , 'CA.bc' , + 'CASMASSCA.oc' , 'CA.oc' , + 'CACMASSCA.bc' , 'CA.bc' , + 'CACMASSCA.oc' , 'CA.oc' , + 'CAANGSTRCA.bc' , 'CA.bc' , + 'CAANGSTRCA.oc' , 'CA.oc' , + 'CAFLUXUCA.bc' , 'CA.bc' , + 'CAFLUXUCA.oc' , 'CA.oc' , + 'CAFLUXVCA.bc' , 'CA.bc' , + 'CAFLUXVCA.oc' , 'CA.oc' , + 'CAAERIDXCA.bc' , 'CA.bc' , + 'CAAERIDXCA.oc' , 'CA.oc' , + 'NIPNO3AQ' , 'NI' , + 'NIPNH4AQ' , 'NI' , + 'NIPNH3AQ' , 'NI' , + 'NH3EM' , 'NI' , + 'NH3DP' , 'NI' , + 'NH3WT' , 'NI' , + 'NH3SV' , 'NI' , + 'NH4SD' , 'NI' , + 'NH4DP' , 'NI' , + 'NH4WT' , 'NI' , + 'NH4SV' , 'NI' , + 'HNO3SMASS' , 'NI' , + 'NH3SMASS' , 'NI' , + 'NH4SMASS' , 'NI' , + 'NISMASS' , 'NI' , + 'NISMASS25' , 'NI' , + 'HNO3CMASS' , 'NI' , + 'NH3CMASS' , 'NI' , + 'NH4CMASS' , 'NI' , + 'NICMASS' , 'NI' , + 'NICMASS25' , 'NI' , + 'NIANGSTR' , 'NI' , + 'NIFLUXU' , 'NI' , + 'NIFLUXV' , 'NI' , + 
'SUPSO2' , 'SU' , + 'SUPSO4' , 'SU' , + 'SUPSO4G' , 'SU' , + 'SUPSO4AQ' , 'SU' , + 'SUPSO4WT' , 'SU' , + 'SUPMSA' , 'SU' , + 'SO2SMASS' , 'SU' , + 'SO2CMASS' , 'SU' , + 'SO4SMASS' , 'SU' , + 'SO4CMASS' , 'SU' , + 'DMSSMASS' , 'SU' , + 'DMSCMASS' , 'SU' , + 'MSASMASS' , 'SU' , + 'MSACMASS' , 'SU' , + 'SUANGSTR' , 'SU' , + 'SUFLUXU' , 'SU' , + 'SUFLUXV' , 'SU' , + 'SO4EMAN' , 'SU' , + 'SO2EMAN' , 'SU' , + 'SO2EMBB' , 'SU' , + 'SO2EMVN' , 'SU' , + 'SO2EMVE' , 'SU' , + :: # # 3d diagnostics @@ -348,30 +348,30 @@ PC720x361-DC.LM: 72 inst_3d.ref_time: 000000, inst_3d.grid_label: PC720x361-DC inst_3d.fields: 'DUMASS' , 'DU', - 'DUMASS25' , 'DU', - 'DUCONC' , 'DU', - 'SSMASS' , 'SS', - 'SSMASS25' , 'SS', - 'SSCONC' , 'SS', - 'CAMASSCA.bc' , 'CA.bc' , - 'CACONCCA.bc' , 'CA.bc' , - 'CAMASSCA.oc' , 'CA.oc' , - 'CACONCCA.oc' , 'CA.oc' , - 'SO4MASS' , 'SU', - 'SO4SAREA' , 'SU', - 'SO4SNUM' , 'SU', - 'SUCONC' , 'SU', - 'PSO2' , 'SU', - 'PMSA' , 'SU', - 'PSO4' , 'SU', - 'PSO4G' , 'SU', - 'PSO4WET' , 'SU', - 'PSO4AQ' , 'SU', - 'DMS' , 'SU', - 'SO2' , 'SU', - 'SO4' , 'SU', - 'MSA' , 'SU', - :: + 'DUMASS25' , 'DU', + 'DUCONC' , 'DU', + 'SSMASS' , 'SS', + 'SSMASS25' , 'SS', + 'SSCONC' , 'SS', + 'CAMASSCA.bc' , 'CA.bc' , + 'CACONCCA.bc' , 'CA.bc' , + 'CAMASSCA.oc' , 'CA.oc' , + 'CACONCCA.oc' , 'CA.oc' , + 'SO4MASS' , 'SU', + 'SO4SAREA' , 'SU', + 'SO4SNUM' , 'SU', + 'SUCONC' , 'SU', + 'PSO2' , 'SU', + 'PMSA' , 'SU', + 'PSO4' , 'SU', + 'PSO4G' , 'SU', + 'PSO4WET' , 'SU', + 'PSO4AQ' , 'SU', + 'DMS' , 'SU', + 'SO2' , 'SU', + 'SO4' , 'SU', + 'MSA' , 'SU', + :: # @@ -386,13 +386,12 @@ PC720x361-DC.LM: 72 inst_aod.ref_time: 000000, inst_aod.grid_label: PC720x361-DC inst_aod.fields: 'CA.bcEXTTAU' , 'CA.bc' , 'AOD_BC', - 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', - 'DUEXTTAU' , 'DU' , 'AOD_DU', - 'NIEXTTAU' , 'NI' , 'AOD_NI', - 'SSEXTTAU' , 'SS' , 'AOD_SS', - 'SUEXTTAU' , 'SU' , 'AOD_SU', - 'TOTEXTTAU' , 'GOCART2G' , 'AOD' , - :: + 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', + 'DUEXTTAU' , 'DU' , 'AOD_DU', + 
'SSEXTTAU' , 'SS' , 'AOD_SS', + 'SUEXTTAU' , 'SU' , 'AOD_SU', + 'TOTEXTTAU' , 'GOCART2G' , 'AOD' , + :: tavg_2d_rad.format: 'CFIO' , @@ -404,30 +403,30 @@ PC720x361-DC.LM: 72 tavg_2d_rad.ref_time: 000000, tavg_2d_rad.grid_label: PC720x361-DC tavg_2d_rad.fields: 'CA.bcEXTTAU' , 'CA.bc' , - 'CA.ocEXTTAU' , 'CA.oc' , - 'CASCATAUCA.bc' , 'CA.bc' , - 'CASCATAUCA.oc' , 'CA.oc' , - 'DUEXTTAU' , 'DU' , - 'DUSCATAU' , 'DU' , - 'DUEXTT25' , 'DU' , - 'DUSCAT25' , 'DU' , - 'DUEXTTFM' , 'DU' , - 'DUSCATFM' , 'DU' , - 'NIEXTTFM' , 'NI' , - 'NISCATFM' , 'NI' , - 'NIEXTT25' , 'NI' , - 'NISCAT25' , 'NI' , - 'NIEXTTAU' , 'NI' , - 'NISCATAU' , 'NI' , - 'SSEXTTAU' , 'SS' , - 'SSSCATAU' , 'SS' , - 'SSEXTT25' , 'SS' , - 'SSSCAT25' , 'SS' , - 'SSEXTTFM' , 'SS' , - 'SSSCATFM' , 'SS' , - 'SUEXTTAU' , 'SU' , - 'SUSCATAU' , 'SU' , - :: + 'CA.ocEXTTAU' , 'CA.oc' , + 'CASCATAUCA.bc' , 'CA.bc' , + 'CASCATAUCA.oc' , 'CA.oc' , + 'DUEXTTAU' , 'DU' , + 'DUSCATAU' , 'DU' , + 'DUEXTT25' , 'DU' , + 'DUSCAT25' , 'DU' , + 'DUEXTTFM' , 'DU' , + 'DUSCATFM' , 'DU' , + 'NIEXTTFM' , 'NI' , + 'NISCATFM' , 'NI' , + 'NIEXTT25' , 'NI' , + 'NISCAT25' , 'NI' , + 'NIEXTTAU' , 'NI' , + 'NISCATAU' , 'NI' , + 'SSEXTTAU' , 'SS' , + 'SSSCATAU' , 'SS' , + 'SSEXTT25' , 'SS' , + 'SSSCAT25' , 'SS' , + 'SSEXTTFM' , 'SS' , + 'SSSCATFM' , 'SS' , + 'SUEXTTAU' , 'SU' , + 'SUSCATAU' , 'SU' , + :: tavg_3d_rad.format: 'CFIO' , tavg_3d_rad.template: '%y4%m2%d2_%h2%n2z.nc4', @@ -439,15 +438,15 @@ PC720x361-DC.LM: 72 tavg_3d_rad.grid_label: PC720x361-DC tavg_3d_rad.splitField: 1, tavg_3d_rad.fields: 'CAEXTCOEFCA.bc' , 'CA.bc' , - 'CAEXTCOEFCA.oc' , 'CA.oc' , - 'CASCACOEFCA.bc' , 'CA.bc' , - 'CASCACOEFCA.oc' , 'CA.oc' , - 'DUEXTCOEF' , 'DU' , - 'DUSCACOEF' , 'DU' , - 'NIEXTCOEF' , 'NI' , - 'NISCACOEF' , 'NI' , - 'SSEXTCOEF' , 'SS' , - 'SSSCACOEF' , 'SS' , - 'SUEXTCOEF' , 'SU' , - 'SUSCACOEF' , 'SU' , - :: + 'CAEXTCOEFCA.oc' , 'CA.oc' , + 'CASCACOEFCA.bc' , 'CA.bc' , + 'CASCACOEFCA.oc' , 'CA.oc' , + 'DUEXTCOEF' , 'DU' , + 'DUSCACOEF' , 
'DU' , + 'NIEXTCOEF' , 'NI' , + 'NISCACOEF' , 'NI' , + 'SSEXTCOEF' , 'SS' , + 'SSSCACOEF' , 'SS' , + 'SUEXTCOEF' , 'SU' , + 'SUSCACOEF' , 'SU' , + :: diff --git a/parm/ufs/chem/CAP.rc b/parm/ufs/chem/CAP.rc index d40106ae81..2b8e71975b 100644 --- a/parm/ufs/chem/CAP.rc +++ b/parm/ufs/chem/CAP.rc @@ -64,12 +64,13 @@ CAP_EXPORTS: CA.bcphilic,CA.bc bc2 CA.ocphobic,CA.oc oc1 CA.ocphilic,CA.oc oc2 - NH3,NI nh3 - NH4a,NI nh4a - NO3an1,NI no3an1 - NO3an2,NI no3an2 - NO3an3,NI no3an3 :: +# NH3,NI nh3 +# NH4a,NI nh4a +# NO3an1,NI no3an1 +# NO3an2,NI no3an2 +# NO3an3,NI no3an3 +# :: # Diagnostic Tracers Table (only PM10 & PM25 available) diff --git a/parm/ufs/chem/DU2G_instance_DU.rc b/parm/ufs/chem/DU2G_instance_DU.rc index c701efb128..6c30cdf06b 100644 --- a/parm/ufs/chem/DU2G_instance_DU.rc +++ b/parm/ufs/chem/DU2G_instance_DU.rc @@ -41,6 +41,8 @@ pressure_lid_in_hPa: 0.01 emission_scheme: fengsha # choose among: fengsha, ginoux, k14 # FENGSHA settings -alpha: 0.04 +alpha: 0.1 gamma: 1.0 +soil_moisture_factor: 1 +soil_drylimit_factor: 1 vertical_to_horizontal_flux_ratio_limit: 2.e-04 diff --git a/parm/ufs/chem/ExtData.gbbepx b/parm/ufs/chem/ExtData.gbbepx index 0661e8412a..3bd516c772 100644 --- a/parm/ufs/chem/ExtData.gbbepx +++ b/parm/ufs/chem/ExtData.gbbepx @@ -2,7 +2,7 @@ # GBBEPx #-------------------------------------------------------------------------------------------------------------------------------- -SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none SO2 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none OC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none BC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc -EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none none NH3 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 SO2 
ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 OC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 BC ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 NH3 ExtData/nexus/GBBEPx/GBBEPx_all01GRID.emissions_v003_%y4%m2%d2.nc diff --git a/parm/ufs/chem/ExtData.other b/parm/ufs/chem/ExtData.other index 5eb1e1dd0b..789576305e 100644 --- a/parm/ufs/chem/ExtData.other +++ b/parm/ufs/chem/ExtData.other @@ -7,16 +7,13 @@ TROPP 'Pa' Y N - 0.0 1.0 TROPP /dev/null:10000. #====== Dust Imports ================================================= -# Ginoux input files -DU_SRC NA N Y - none none du_src ExtData/Dust/gocart.dust_source.v5a.x1152_y721.nc - # FENGSHA input files. Note: regridding should be N or E - Use files with _FillValue != NaN -DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc -DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_CLAY '1' Y E - none none clayfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_SAND '1' Y E - none none sandfrac ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc DU_SILT '1' Y E - none none siltfrac /dev/null -DU_SSM '1' Y E - none none ssm /dev/null:1.0 -DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/Dust/FENGSHA_p81_10km_inputs.nc -DU_UTHRES '1' Y E - none none uthres ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_SSM '1' Y E - none none sep ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc +DU_UTHRES '1' Y E - none none uthres ExtData/nexus/FENGSHA/FENGSHA_2022_NESDIS_inputs_10km_v3.2.nc #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions 
-- allowed to input as two layers @@ -135,16 +132,16 @@ BRC_AVIATION_CRS NA Y Y %y4-%m2-%d2t12:00:00 none none oc_aviation /dev/null # SOA production pSOA_BIOB_VOC NA Y Y %y4-%m2-%d2t12:00:00 none none biofuel /dev/null -# ======= Nitrate Sources ======== -EMI_NH3_AG 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_ag ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_EN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_en /dev/null -EMI_NH3_IN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_in ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_RE 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_re ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_TR 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_tr ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc -EMI_NH3_OC 'kg m-2 s-1' Y Y %y4-%m2-%d2T12:00:00 none none emiss_ocn ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4 - -# -------------------------------------------------------------- -# If using 64 levels please replace this section with the correct values (ie replace 127 with 64) -NITRATE_HNO3 'mol mol-1' Y N %y4-%m2-%d2T12:00:00 none 0.20 hno3 ExtData/PIESA/L127/GMI.vmr_HNO3.x144_y91.t12.2006.nc4 -# -------------------------------------------------------------- -NI_regionMask NA Y V - none none REGION_MASK ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc +# # ======= Nitrate Sources ======== +# EMI_NH3_AG 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_ag ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_EN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_en /dev/null +# EMI_NH3_IN 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_in ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_RE 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_re ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_TR 'kg m-2 s-1' N Y %y4-%m2-%d2T12:00:00 none none NH3_tr 
ExtData/nexus/CEDS/v2019/%y4/CEDS.2019.emis.%y4%m2%d2.nc +# EMI_NH3_OC 'kg m-2 s-1' Y Y %y4-%m2-%d2T12:00:00 none none emiss_ocn ExtData/PIESA/sfc/GEIA.emis_NH3.ocean.x576_y361.t12.20080715_12z.nc4 + +# # -------------------------------------------------------------- +# # If using 64 levels please replace this section with the correct values (ie replace 127 with 64) +# NITRATE_HNO3 'mol mol-1' Y N %y4-%m2-%d2T12:00:00 none 0.20 hno3 ExtData/PIESA/L127/GMI.vmr_HNO3.x144_y91.t12.2006.nc4 +# # -------------------------------------------------------------- +# NI_regionMask NA Y V - none none REGION_MASK ExtData/PIESA/sfc/ARCTAS.region_mask.x540_y361.2008.nc diff --git a/parm/ufs/chem/ExtData.qfed b/parm/ufs/chem/ExtData.qfed index 86ab3c86cc..b3a721211e 100644 --- a/parm/ufs/chem/ExtData.qfed +++ b/parm/ufs/chem/ExtData.qfed @@ -2,7 +2,7 @@ # QFED #-------------------------------------------------------------------------------------------------------------------------------- -SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_so2.006.%y4%m2%d2.nc4 -OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_oc.006.%y4%m2%d2.nc4 -BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_bc.006.%y4%m2%d2.nc4 -EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none none biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_nh3.006.%y4%m2%d2.nc4 +SU_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_so2.006.%y4%m2%d2.nc4 +OC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_oc.006.%y4%m2%d2.nc4 +BC_BIOMASS NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_bc.006.%y4%m2%d2.nc4 +# EMI_NH3_BB NA N Y %y4-%m2-%d2t12:00:00 none 0.7778 biomass ExtData/nexus/QFED/%y4/%m2/qfed2.emis_nh3.006.%y4%m2%d2.nc4 diff --git a/parm/ufs/chem/GOCART2G_GridComp.rc 
b/parm/ufs/chem/GOCART2G_GridComp.rc index 18954f8cdd..2dd63a6d17 100644 --- a/parm/ufs/chem/GOCART2G_GridComp.rc +++ b/parm/ufs/chem/GOCART2G_GridComp.rc @@ -31,7 +31,7 @@ PASSIVE_INSTANCES_SU: ACTIVE_INSTANCES_CA: CA.oc CA.bc # CA.oc.data CA.bc.data PASSIVE_INSTANCES_CA: -ACTIVE_INSTANCES_NI: NI # NI.data +ACTIVE_INSTANCES_NI: # NI # NI.data PASSIVE_INSTANCES_NI: # Set optics parameters diff --git a/parm/ufs/chem/gocart_tracer.list b/parm/ufs/chem/gocart_tracer.list index 8b0202e2c4..8742aff67c 100644 --- a/parm/ufs/chem/gocart_tracer.list +++ b/parm/ufs/chem/gocart_tracer.list @@ -16,10 +16,5 @@ seas2 seas3 seas4 seas5 -nh3 -nh4a -no3an1 -no3an2 -no3an3 pm25 pm10 diff --git a/parm/ufs/fv3/diag_table.aero b/parm/ufs/fv3/diag_table.aero index 683c50cc4a..6f96b462f1 100644 --- a/parm/ufs/fv3/diag_table.aero +++ b/parm/ufs/fv3/diag_table.aero @@ -19,10 +19,10 @@ "gfs_dyn", "seas3", "seas3", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "seas4", "seas4", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "seas5", "seas5", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nh3", "nh3", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "nh4a", "nh4a", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no3an1", "no3an1", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no3an2", "no3an2", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "no3an3", "no3an3", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "nh3", "nh3", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "nh4a", "nh4a", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an1", "no3an1", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an2", "no3an2", "fv3_history", "all", .false., "none", 2 +# "gfs_dyn", "no3an3", "no3an3", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "pm25", "pm25", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "pm10", "pm10", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/fv3/field_table.aero 
b/parm/ufs/fv3/field_table.aero index d917dd786c..385192803f 100644 --- a/parm/ufs/fv3/field_table.aero +++ b/parm/ufs/fv3/field_table.aero @@ -89,31 +89,31 @@ "units", "ug/kg" "tracer_usage", "chemistry" "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "nh3" - "longname", "primary NH3 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "nh4a" - "longname", "primary NH4a mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an1" - "longname", "primary NO3an1 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an2" - "longname", "primary NO3an2 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / - "TRACER", "atmos_mod", "no3an3" - "longname", "primary NO3an3 mixing ratio" - "units", "ug/kg" - "tracer_usage", "chemistry" - "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "nh3" +# "longname", "primary NH3 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "nh4a" +# "longname", "primary NH4a mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an1" +# "longname", "primary NO3an1 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an2" +# "longname", "primary NO3an2 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "no3an3" +# "longname", "primary NO3an3 mixing ratio" +# "units", "ug/kg" +# "tracer_usage", "chemistry" +# "profile_type", "fixed", 
"surface_value=0.0" / # diagnostic PM tracers "TRACER", "atmos_mod", "pm25" "longname", "primary PM25 mixing ratio" diff --git a/sorc/checkout.sh b/sorc/checkout.sh index de4fcdf838..382e7b6f32 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -151,7 +151,7 @@ source "${topdir}/../workflow/gw_setup.sh" # The checkout version should always be a speciifc commit (hash or tag), not a branch errs=0 # Checkout UFS submodules in parallel -checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-68050e5}" "8" & +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-63a43d9}" "8" ; errs=$((errs + $?)) # Run all other checkouts simultaneously with just 1 core each to handle submodules. checkout "wxflow" "https://github.com/NOAA-EMC/wxflow" "528f5ab" &