Skip to content

Commit 066f60f

Browse files
committed
Merge branch 'develop' into feature/lnd_regrid_iau
* develop: Ensure OCNRES and ICERES have 3 digits in the archive script (NOAA-EMC#3199) Set runtime shell requirements within Jenkins Pipeline (NOAA-EMC#3171) Add efcs and epos to ufs_hybatm xml (NOAA-EMC#3192) (NOAA-EMC#3193) Fix GEFS and SFS compile flags in build_all.sh (NOAA-EMC#3197) Remove early-cycle EnKF forecast (NOAA-EMC#3185) Fix mod_icec bug in atmos_prod (NOAA-EMC#3167) Create compute build option (NOAA-EMC#3186) Support global-workflow using Rocky 8 on CSPs (NOAA-EMC#2998)
2 parents 552642f + 29089be commit 066f60f

38 files changed

+786
-296
lines changed

.github/CODEOWNERS

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -211,3 +211,4 @@ ush/python/pygfs/utils/marine_da_utils.py @guillaumevernieres @AndrewEichmann-NO
211211

212212
# Specific workflow scripts
213213
workflow/generate_workflows.sh @DavidHuber-NOAA
214+
workflow/build_compute.py @DavidHuber-NOAA @aerorahul

.gitignore

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,9 @@ parm/wafs
8585

8686
# Ignore sorc and logs folders from externals
8787
#--------------------------------------------
88+
sorc/build.xml
89+
sorc/build.db
90+
sorc/build_lock.db
8891
sorc/*log
8992
sorc/logs
9093
sorc/calc_analysis.fd

ci/Jenkinsfile

Lines changed: 37 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -120,9 +120,7 @@ pipeline {
120120
def error_logs_message = ""
121121
dir("${HOMEgfs}/sorc") {
122122
try {
123-
sh(script: './build_all.sh -kgu') // build the global-workflow executables for GFS variant (UFS-wx-model, WW3 pre/post executables)
124-
sh(script: './build_ww3prepost.sh -w > ./logs/build_ww3prepost_gefs.log 2>&1') // build the WW3 pre/post processing executables for GEFS variant
125-
sh(script: './build_ufs.sh -w -e gefs_model.x > ./logs/build_ufs_gefs.log 2>&1') // build the UFS-wx-model executable for GEFS variant
123+
sh(script: './build_compute.sh all') // build the global-workflow executables
126124
} catch (Exception error_build) {
127125
echo "Failed to build global-workflow: ${error_build.getMessage()}"
128126
if ( fileExists("logs/error.logs") ) {
@@ -140,8 +138,14 @@ pipeline {
140138
}
141139
}
142140
try {
143-
sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
144-
gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
141+
sh(script: """
142+
source ${HOMEgfs}/workflow/gw_setup.sh
143+
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}
144+
""")
145+
gist_url=sh(script: """
146+
source ${HOMEgfs}/workflow/gw_setup.sh
147+
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}
148+
""", returnStdout: true).trim()
145149
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
146150
} catch (Exception error_comment) {
147151
echo "Failed to comment on PR: ${error_comment.getMessage()}"
@@ -160,7 +164,10 @@ pipeline {
160164
}
161165
}
162166
// Get a list of CI cases to run
163-
CI_CASES = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
167+
CI_CASES = sh(script: """
168+
source ${HOMEgfs}/workflow/gw_setup.sh
169+
${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}
170+
""", returnStdout: true).trim().split()
164171
echo "Cases to run: ${CI_CASES}"
165172
}
166173
}
@@ -181,7 +188,10 @@ pipeline {
181188
script {
182189
env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
183190
try {
184-
error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml", returnStdout: true).trim()
191+
error_output = sh(script: """
192+
source ${HOMEgfs}/workflow/gw_setup.sh
193+
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml
194+
""", returnStdout: true).trim()
185195
} catch (Exception error_create) {
186196
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """)
187197
error("Case ${caseName} failed to create experiment directory")
@@ -196,10 +206,19 @@ pipeline {
196206
def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
197207
sh(script: " rm -f ${error_file}")
198208
try {
199-
sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow'")
200-
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}")
209+
sh(script: """
210+
source ${HOMEgfs}/workflow/gw_setup.sh
211+
${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow'
212+
""")
213+
sh(script: """
214+
source ${HOMEgfs}/workflow/gw_setup.sh
215+
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}
216+
""")
201217
} catch (Exception error_experment) {
202-
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
218+
sh(script: """
219+
source ${HOMEgfs}/workflow/gw_setup.sh
220+
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}
221+
""")
203222
ws(CUSTOM_WORKSPACE) {
204223
def error_logs = ""
205224
def error_logs_message = ""
@@ -219,9 +238,15 @@ pipeline {
219238
}
220239
}
221240
try {
222-
gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
241+
gist_url = sh(script: """
242+
source ${HOMEgfs}/workflow/gw_setup.sh
243+
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}
244+
""", returnStdout: true).trim()
223245
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
224-
sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}")
246+
sh(script: """
247+
source ${HOMEgfs}/workflow/gw_setup.sh
248+
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}
249+
""")
225250
} catch (Exception error_comment) {
226251
echo "Failed to comment on PR: ${error_comment.getMessage()}"
227252
}

ci/scripts/utils/launch_java_agent.sh

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -65,14 +65,14 @@ controller_url="https://jenkins.epic.oarcloud.noaa.gov"
6565
controller_user=${controller_user:-"terry.mcguinness"}
6666
controller_user_auth_token="jenkins_token"
6767

68-
HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
68+
HOMEGFS_="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
6969
host=$(hostname)
7070

7171
#########################################################################
7272
# Set up runtime environment variables for accounts on supported machines
7373
#########################################################################
7474

75-
source "${HOMEgfs}/ush/detect_machine.sh"
75+
source "${HOMEGFS_}/ush/detect_machine.sh"
7676
case ${MACHINE_ID} in
7777
hera | orion | hercules | wcoss2 | gaea)
7878
echo "Launch Jenkins Java Controler on ${MACHINE_ID}";;
@@ -84,10 +84,10 @@ esac
8484
LOG=lanuched_agent-$(date +%Y%m%d%M).log
8585
rm -f "${LOG}"
8686

87-
source "${HOMEgfs}/ush/module-setup.sh"
88-
module use "${HOMEgfs}/modulefiles"
87+
source "${HOMEGFS_}/ush/module-setup.sh"
88+
module use "${HOMEGFS_}/modulefiles"
8989
module load "module_gwsetup.${MACHINE_ID}"
90-
source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
90+
source "${HOMEGFS_}/ci/platforms/config.${MACHINE_ID}"
9191

9292
JAVA_HOME="${JENKINS_AGENT_LANUCH_DIR}/JAVA/jdk-17.0.10"
9393
if [[ ! -d "${JAVA_HOME}" ]]; then
@@ -102,9 +102,10 @@ JAVA="${JAVA_HOME}/bin/java"
102102
echo "JAVA VERSION: "
103103
${JAVA} -version
104104

105-
export GH="${HOME}/bin/gh"
106-
[[ -f "${GH}" ]] || echo "gh is not installed in ${HOME}/bin"
105+
GH=$(command -v gh || echo "${HOME}/bin/gh")
106+
[[ -f "${GH}" ]] || ( echo "ERROR: GitHub CLI (gh) not found. (exiting with error)"; exit 1 )
107107
${GH} --version
108+
export GH
108109

109110
check_mark=$("${GH}" auth status -t 2>&1 | grep "Token:" | awk '{print $1}') || true
110111
if [[ "${check_mark}" != "" ]]; then

docs/source/clone.rst

Lines changed: 19 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -18,35 +18,39 @@ Clone the `global-workflow` and `cd` into the `sorc` directory:
1818
git clone --recursive https://github.com/NOAA-EMC/global-workflow
1919
cd global-workflow/sorc
2020

21-
For forecast-only (coupled or uncoupled) build of the components:
21+
.. _build_examples:
22+
23+
The build_all.sh script can be used to build all required components of the global workflow. The accepted argument is a list of systems to be built. This includes builds for GFS and GEFS forecast-only experiments, and GSI- and GDASApp-based DA for cycled GFS experiments. See `feature availability <hpc.html#feature-availability-by-hpc>`__ to see which system(s) are available on each supported HPC.
2224

2325
::
2426

25-
./build_all.sh
27+
./build_all.sh [gfs] [gefs] [gsi] [gdas] [all]
2628

27-
For cycled (w/ data assimilation) use the `-g` option during build:
29+
For example, to run GFS experiments with GSI DA, execute:
2830

2931
::
3032

31-
./build_all.sh -g
33+
./build_all.sh gfs gsi
3234

33-
For coupled cycling (include new UFSDA) use the `-gu` options during build:
35+
This builds the GFS, UFS-utils, GFS-utils, WW3 with PDLIB (structured wave grids), UPP, GSI, GSI-monitor, and GSI-utils executables.
3436

35-
[Currently only available on Hera, Orion, and Hercules]
37+
For coupled cycling (include new UFSDA) execute:
3638

3739
::
3840

39-
./build_all.sh -gu
41+
./build_all.sh gfs gdas
4042

43+
This builds all of the same executables, except it builds the GDASApp instead of the GSI.
4144

42-
For building without PDLIB (unstructured grid) for the wave model, use the `-w` options during build:
45+
To run GEFS (forecast-only) execute:
4346

4447
::
4548

46-
./build_all.sh -w
49+
./build_all.sh gefs
4750

51+
This builds the GEFS, UFS-utils, GFS-utils, WW3 *without* PDLIB (unstructured wave grids), and UPP executables.
4852

49-
Build workflow components and link workflow artifacts such as executables, etc.
53+
Once the building is complete, link workflow artifacts such as executables, configuration files, and scripts via
5054

5155
::
5256

@@ -107,40 +111,19 @@ Under the ``/sorc`` folder is a script to build all components called ``build_al
107111

108112
::
109113

110-
./build_all.sh [-a UFS_app][-g][-h][-u][-v]
114+
./build_all.sh [-a UFS_app][-k][-h][-v] [list of system(s) to build]
111115
-a UFS_app:
112116
Build a specific UFS app instead of the default
113-
-g:
114-
Build GSI
117+
-k:
118+
Kill all builds immediately if one fails
115119
-h:
116120
Print this help message and exit
117-
-j:
118-
Specify maximum number of build jobs (n)
119-
-u:
120-
Build UFS-DA
121121
-v:
122122
Execute all build scripts with -v option to turn on verbose where supported
123123

124-
For forecast-only (coupled or uncoupled) build of the components:
125-
126-
::
127-
128-
./build_all.sh
129-
130-
For cycled (w/ data assimilation) use the `-g` option during build:
131-
132-
::
133-
134-
./build_all.sh -g
135-
136-
For coupled cycling (include new UFSDA) use the `-gu` options during build:
137-
138-
[Currently only available on Hera, Orion, and Hercules]
139-
140-
::
141-
142-
./build_all.sh -gu
124+
Lastly, pass to build_all.sh a list of systems to build. This includes `gfs`, `gefs`, `sfs` (not fully supported), `gsi`, `gdas`, and `all`.
143125

126+
For examples of how to use this script, see :ref:`build examples <build_examples>`.
144127

145128
^^^^^^^^^^^^^^^
146129
Link components
@@ -156,4 +139,3 @@ After running the checkout and build scripts run the link script:
156139

157140
Where:
158141
``-o``: Run in operations (NCO) mode. This creates copies instead of using symlinks and is generally only used by NCO during installation into production.
159-

env/AWSPW.env

Lines changed: 27 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,29 @@ else
3333
exit 2
3434
fi
3535

36-
if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
36+
if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then
37+
38+
export POE="NO"
39+
export BACK="NO"
40+
export sys_tp="AWSPW"
41+
export launcher_PREP="srun"
42+
43+
elif [[ "${step}" = "prepsnowobs" ]]; then
44+
45+
export APRUN_CALCFIMS="${APRUN_default}"
46+
47+
elif [[ "${step}" = "prep_emissions" ]]; then
48+
49+
export APRUN="${APRUN_default}"
50+
51+
elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then
52+
53+
export CFP_MP="YES"
54+
if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi
55+
export wavempexec=${launcher}
56+
export wave_mpmd=${mpmd_opt}
57+
58+
elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
3759

3860
export launcher="srun --mpi=pmi2 -l"
3961

@@ -52,52 +74,16 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
5274

5375
elif [[ "${step}" = "post" ]]; then
5476

55-
export NTHREADS_NP=${NTHREADS1}
56-
export APRUN_NP="${APRUN_default}"
57-
58-
export NTHREADS_DWN=${threads_per_task_dwn:-1}
59-
[[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task}
60-
export APRUN_DWN="${launcher} -n ${ntasks_dwn}"
61-
62-
elif [[ "${step}" = "atmos_products" ]]; then
63-
64-
export USE_CFP="YES" # Use MPMD for downstream product generation on Hera
77+
export NTHREADS_UPP=${NTHREADS1}
78+
export APRUN_UPP="${APRUN_default} --cpus-per-task=${NTHREADS_UPP}"
6579

6680
elif [[ "${step}" = "oceanice_products" ]]; then
6781

6882
export NTHREADS_OCNICEPOST=${NTHREADS1}
6983
export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}"
7084

71-
elif [[ "${step}" = "ecen" ]]; then
72-
73-
export NTHREADS_ECEN=${NTHREADSmax}
74-
export APRUN_ECEN="${APRUN_default}"
75-
76-
export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
77-
[[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
78-
export APRUN_CHGRES="time"
79-
80-
export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
81-
[[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
82-
export APRUN_CALCINC="${APRUN_default}"
83-
84-
elif [[ "${step}" = "esfc" ]]; then
85-
86-
export NTHREADS_ESFC=${NTHREADSmax}
87-
export APRUN_ESFC="${APRUN_default}"
88-
89-
export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
90-
[[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
91-
export APRUN_CYCLE="${APRUN_default}"
92-
93-
elif [[ "${step}" = "epos" ]]; then
94-
95-
export NTHREADS_EPOS=${NTHREADSmax}
96-
export APRUN_EPOS="${APRUN_default}"
97-
98-
elif [[ "${step}" = "fit2obs" ]]; then
85+
elif [[ "${step}" = "atmos_products" ]]; then
9986

100-
export NTHREADS_FIT2OBS=${NTHREADS1}
101-
export MPIRUN="${APRUN_default}"
87+
export USE_CFP="YES" # Use MPMD for downstream product generation on AWS
10288

10389
fi

env/AZUREPW.env

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
1515
# Configure MPI environment
1616
export OMP_STACKSIZE=2048000
1717
export NTHSTACK=1024000000
18+
export UCX_TLS=ud,sm,self
1819

1920
ulimit -s unlimited
2021
ulimit -a
@@ -50,6 +51,10 @@ elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}
5051
export wavempexec=${launcher}
5152
export wave_mpmd=${mpmd_opt}
5253

54+
elif [[ "${step}" = "prep_emissions" ]]; then
55+
56+
export APRUN="${APRUN_default}"
57+
5358
elif [[ "${step}" = "post" ]]; then
5459

5560
export NTHREADS_NP=${NTHREADS1}
@@ -71,33 +76,33 @@ elif [[ "${step}" = "oceanice_products" ]]; then
7176
elif [[ "${step}" = "ecen" ]]; then
7277

7378
export NTHREADS_ECEN=${NTHREADSmax}
74-
export APRUN_ECEN="${APRUN}"
79+
export APRUN_ECEN="${APRUN_default}"
7580

7681
export NTHREADS_CHGRES=${threads_per_task_chgres:-12}
7782
[[ ${NTHREADS_CHGRES} -gt ${max_tasks_per_node} ]] && export NTHREADS_CHGRES=${max_tasks_per_node}
7883
export APRUN_CHGRES="time"
7984

8085
export NTHREADS_CALCINC=${threads_per_task_calcinc:-1}
8186
[[ ${NTHREADS_CALCINC} -gt ${max_threads_per_task} ]] && export NTHREADS_CALCINC=${max_threads_per_task}
82-
export APRUN_CALCINC="${APRUN}"
87+
export APRUN_CALCINC="${APRUN_default}"
8388

8489
elif [[ "${step}" = "esfc" ]]; then
8590

8691
export NTHREADS_ESFC=${NTHREADSmax}
87-
export APRUN_ESFC="${APRUN}"
92+
export APRUN_ESFC="${APRUN_default}"
8893

8994
export NTHREADS_CYCLE=${threads_per_task_cycle:-14}
9095
[[ ${NTHREADS_CYCLE} -gt ${max_tasks_per_node} ]] && export NTHREADS_CYCLE=${max_tasks_per_node}
91-
export APRUN_CYCLE="${APRUN}"
96+
export APRUN_CYCLE="${APRUN_default}"
9297

9398
elif [[ "${step}" = "epos" ]]; then
9499

95100
export NTHREADS_EPOS=${NTHREADSmax}
96-
export APRUN_EPOS="${APRUN}"
101+
export APRUN_EPOS="${APRUN_default}"
97102

98103
elif [[ "${step}" = "fit2obs" ]]; then
99104

100105
export NTHREADS_FIT2OBS=${NTHREADS1}
101-
export MPIRUN="${APRUN}"
106+
export MPIRUN="${APRUN_default}"
102107

103108
fi

0 commit comments

Comments
 (0)