
Commit

Merge branch 'develop' into feature/soilanal_det
ClaraDraper-NOAA committed Jan 22, 2025
2 parents 8120fb6 + 4ab8cf8 commit 0f150a9
Showing 243 changed files with 3,787 additions and 2,472 deletions.
1 change: 1 addition & 0 deletions .github/CODEOWNERS
@@ -211,3 +211,4 @@ ush/python/pygfs/utils/marine_da_utils.py @guillaumevernieres @AndrewEichmann-NO

# Specific workflow scripts
workflow/generate_workflows.sh @DavidHuber-NOAA
workflow/build_compute.py @DavidHuber-NOAA @aerorahul
4 changes: 2 additions & 2 deletions .github/workflows/docs.yaml
@@ -42,14 +42,14 @@ jobs:
./.github/scripts/build_docs.sh
- name: Upload documentation (on success)
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: always()
with:
name: documentation
path: artifact/documentation

- name: Upload warnings (on failure)
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
if: failure()
with:
name: documentation_warnings.log
2 changes: 1 addition & 1 deletion .github/workflows/linters.yaml
@@ -31,7 +31,7 @@ jobs:

- if: ${{ always() }}
name: Upload artifact with ShellCheck defects in SARIF format
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: Differential ShellCheck SARIF
path: ${{ steps.ShellCheck.outputs.sarif }}
8 changes: 4 additions & 4 deletions .gitignore
@@ -81,10 +81,14 @@ parm/ufs/MOM6_data_table.IN
parm/ufs/ice_in.IN
parm/ufs/ufs.configure.*.IN
parm/ufs/post_itag_gfs
parm/ufs/ww3_shel.nml.IN
parm/wafs

# Ignore sorc and logs folders from externals
#--------------------------------------------
sorc/build.xml
sorc/build.db
sorc/build_lock.db
sorc/*log
sorc/logs
sorc/calc_analysis.fd
@@ -159,10 +163,6 @@ ush/global_cycle_driver.sh
ush/jediinc2fv3.py
ush/ufsda
ush/soca
ush/make_NTC_file.pl
ush/make_ntc_bull.pl
ush/make_tif.sh
ush/month_name.sh
ush/imsfv3_scf2ioda.py
ush/atparse.bash
ush/run_bufr2ioda.py
27 changes: 27 additions & 0 deletions CMakeLists.txt
@@ -0,0 +1,27 @@
# ------------------------------------------------------------------------- #
# Global Workflow
# ------------------------------------------------------------------------- #

# Check for minimum cmake requirement
cmake_minimum_required( VERSION 3.20 FATAL_ERROR )

project(global_workflow VERSION 1.0.0)

include(GNUInstallDirs)
enable_testing()

# Build type.
if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release|RelWithDebInfo|MinSizeRel)$")
message(STATUS "Setting build type to 'Release' as none was specified.")
set(CMAKE_BUILD_TYPE
"Release"
CACHE STRING "Choose the type of build." FORCE)
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release"
"MinSizeRel" "RelWithDebInfo")
endif()

# Build global-workflow source codes
# add_subdirectory(sorc)

# Setup tests
add_subdirectory(ctests)
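
For orientation, a minimal sketch of how this new top-level CMakeLists.txt might be exercised from a shell; the build directory name and test selection are illustrative, and the available cases depend on what the ctests/ subdirectory registers.

    # Configure from the repository root (CMake >= 3.20, per the minimum above);
    # the build type falls back to Release when none is given.
    cmake -S . -B build

    # List the tests registered via enable_testing()/add_subdirectory(ctests),
    # then run them (--test-dir requires CTest >= 3.20).
    ctest --test-dir build -N
    ctest --test-dir build --output-on-failure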
49 changes: 37 additions & 12 deletions ci/Jenkinsfile
@@ -120,9 +120,7 @@ pipeline {
def error_logs_message = ""
dir("${HOMEgfs}/sorc") {
try {
sh(script: './build_all.sh -kgu') // build the global-workflow executables for GFS variant (UFS-wx-model, WW3 pre/post executables)
sh(script: './build_ww3prepost.sh -w > ./logs/build_ww3prepost_gefs.log 2>&1') // build the WW3 pre/post processing executables for GEFS variant
sh(script: './build_ufs.sh -w -e gefs_model.x > ./logs/build_ufs_gefs.log 2>&1') // build the UFS-wx-model executable for GEFS variant
sh(script: './build_compute.sh all') // build the global-workflow executables
} catch (Exception error_build) {
echo "Failed to build global-workflow: ${error_build.getMessage()}"
if ( fileExists("logs/error.logs") ) {
@@ -140,8 +138,14 @@
}
}
try {
sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}
""")
gist_url=sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}
""", returnStdout: true).trim()
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
} catch (Exception error_comment) {
echo "Failed to comment on PR: ${error_comment.getMessage()}"
@@ -160,7 +164,10 @@
}
}
// Get a list of CI cases to run
CI_CASES = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
CI_CASES = sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}
""", returnStdout: true).trim().split()
echo "Cases to run: ${CI_CASES}"
}
}
@@ -181,7 +188,10 @@
script {
env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
try {
error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml", returnStdout: true).trim()
error_output = sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml
""", returnStdout: true).trim()
} catch (Exception error_create) {
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """)
error("Case ${caseName} failed to create experiment directory")
@@ -196,10 +206,19 @@
def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
sh(script: " rm -f ${error_file}")
try {
sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow'")
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}")
sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} 'global-workflow'
""")
sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cleanup_experiment ${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}
""")
} catch (Exception error_experment) {
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}
""")
ws(CUSTOM_WORKSPACE) {
def error_logs = ""
def error_logs_message = ""
@@ -219,9 +238,15 @@
}
}
try {
gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
gist_url = sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}
""", returnStdout: true).trim()
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}")
sh(script: """
source ${HOMEgfs}/workflow/gw_setup.sh
${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}
""")
} catch (Exception error_comment) {
echo "Failed to comment on PR: ${error_comment.getMessage()}"
}
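
The recurring change in these pipeline steps is to source workflow/gw_setup.sh before invoking the workflow's Python utilities. A minimal sketch of the same pattern outside Jenkins, with an illustrative HOMEgfs path and an assumed machine name:

    #!/usr/bin/env bash
    set -euo pipefail

    # Illustrative location of a local global-workflow clone.
    HOMEgfs=${HOMEgfs:-"${HOME}/global-workflow"}

    # Load the workflow environment first, as the updated sh() steps do,
    # so the Python utilities find the modules and interpreter they expect.
    source "${HOMEgfs}/workflow/gw_setup.sh"

    # Example utility call taken from the pipeline; "hera" is an assumed machine name.
    "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py" hera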
21 changes: 21 additions & 0 deletions ci/cases/pr/C48_S2SW_extended.yaml
@@ -0,0 +1,21 @@
experiment:
system: gfs
mode: forecast-only

arguments:
pslot: {{ 'pslot' | getenv }}
app: S2SW
resdetatmos: 48
resdetocean: 5.0
comroot: {{ 'RUNTESTS' | getenv }}/COMROOT
expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR
idate: 2021032312
edate: 2021032312
yaml: {{ HOMEgfs }}/ci/cases/yamls/gfs_extended_ci.yaml

skip_ci_on_hosts:
- hera
- gaea
- orion
- hercules
- wcoss2 # TODO run on WCOSS2 once the gfs_waveawipsbulls job is fixed
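
This case template reads pslot and RUNTESTS from the environment via getenv. A hedged sketch of creating the experiment by hand, mirroring the create_experiment call in the Jenkinsfile above; the exported paths and pslot value are illustrative, and the platform configs under ci/platforms/ supply settings such as HPC_ACCOUNT on supported hosts.

    #!/usr/bin/env bash
    set -euo pipefail

    # Illustrative locations; HOMEgfs is a local global-workflow clone.
    export HOMEgfs="${HOME}/global-workflow"
    export RUNTESTS="${HOME}/RUNTESTS"
    export pslot="C48_S2SW_extended_test"

    # Same pattern as the pipeline: source the environment, then create the case.
    source "${HOMEgfs}/workflow/gw_setup.sh"
    "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh" create_experiment \
        "${HOMEgfs}/ci/cases/pr/C48_S2SW_extended.yaml"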
1 change: 0 additions & 1 deletion ci/cases/yamls/gefs_defaults_ci.yaml
@@ -4,7 +4,6 @@ base:
ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }}
SFS_POST: "NO"
FHOUT_GFS: 6
stage_ic:
USE_OCN_ENS_PERTURB_FILES: "NO"
USE_ATM_ENS_PERTURB_FILES: "NO"
ocn:
2 changes: 1 addition & 1 deletion ci/cases/yamls/gfs_extended_ci.yaml
@@ -6,7 +6,7 @@ base:
DO_GOES: "YES"
DO_BUFRSND: "YES"
DO_GEMPAK: "YES"
DO_AWIPS: "NO"
DO_AWIPS: "YES"
DO_NPOESS: "YES"
DO_GENESIS_FSU: "NO"
FCST_BREAKPOINTS: 192
7 changes: 3 additions & 4 deletions ci/cases/yamls/sfs_defaults.yaml
@@ -7,7 +7,7 @@ base:
DO_BUFRSND: "NO"
DO_GEMPAK: "NO"
DO_AWIPS: "NO"
KEEPDATA: "YES"
KEEPDATA: "NO"
DO_EXTRACTVARS: "NO"
FHMAX_GFS: 2976
FHMAX_HF_GFS: 0
@@ -17,6 +17,8 @@ base:
FHOUT_ICE_GFS: 24
FCST_BREAKPOINTS: ""
REPLAY_ICS: "NO"
USE_OCN_ENS_PERTURB_FILES: "YES"
USE_ATM_ENS_PERTURB_FILES: "YES"
HPSSARCH: "NO"
LOCALARCH: "NO"
SFS_POST: "YES"
@@ -26,8 +28,5 @@ fcst:
MONO: "mono"
reforecast: "YES"
FHZER: 24
stage_ic:
USE_OCN_ENS_PERTURB_FILES: "YES"
USE_ATM_ENS_PERTURB_FILES: "YES"
ocn:
MOM6_INTERP_ICS: "YES"
2 changes: 2 additions & 0 deletions ci/platforms/config.hera
@@ -2,6 +2,8 @@

export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT
export ICSDIR_ROOT=/scratch1/NCEPDEV/global/glopara/data/ICSDIR

export STAGED_TESTS_DIR=${GFS_CI_ROOT}/STAGED_TESTS_DIR
export HPC_ACCOUNT=nems
export max_concurrent_cases=5
export max_concurrent_pr=4
1 change: 1 addition & 0 deletions ci/platforms/config.orion
@@ -2,6 +2,7 @@

export GFS_CI_ROOT=/work2/noaa/stmp/GFS_CI_ROOT/ORION
export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR
export STAGED_TESTS_DIR=${GFS_CI_ROOT}/STAGED_TESTS_DIR
export HPC_ACCOUNT=nems
export max_concurrent_cases=5
export max_concurrent_pr=4
15 changes: 8 additions & 7 deletions ci/scripts/utils/launch_java_agent.sh
@@ -65,14 +65,14 @@ controller_url="https://jenkins.epic.oarcloud.noaa.gov"
controller_user=${controller_user:-"terry.mcguinness"}
controller_user_auth_token="jenkins_token"

HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
HOMEGFS_="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." >/dev/null 2>&1 && pwd )"
host=$(hostname)

#########################################################################
# Set up runtime environment variables for accounts on supported machines
#########################################################################

source "${HOMEgfs}/ush/detect_machine.sh"
source "${HOMEGFS_}/ush/detect_machine.sh"
case ${MACHINE_ID} in
hera | orion | hercules | wcoss2 | gaea)
echo "Launch Jenkins Java Controler on ${MACHINE_ID}";;
@@ -84,10 +84,10 @@ esac
LOG=lanuched_agent-$(date +%Y%m%d%M).log
rm -f "${LOG}"

source "${HOMEgfs}/ush/module-setup.sh"
module use "${HOMEgfs}/modulefiles"
source "${HOMEGFS_}/ush/module-setup.sh"
module use "${HOMEGFS_}/modulefiles"
module load "module_gwsetup.${MACHINE_ID}"
source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
source "${HOMEGFS_}/ci/platforms/config.${MACHINE_ID}"

JAVA_HOME="${JENKINS_AGENT_LANUCH_DIR}/JAVA/jdk-17.0.10"
if [[ ! -d "${JAVA_HOME}" ]]; then
@@ -102,9 +102,10 @@ JAVA="${JAVA_HOME}/bin/java"
echo "JAVA VERSION: "
${JAVA} -version

export GH="${HOME}/bin/gh"
[[ -f "${GH}" ]] || echo "gh is not installed in ${HOME}/bin"
GH=$(command -v gh || echo "${HOME}/bin/gh")
[[ -f "${GH}" ]] || ( echo "ERROR: GitHub CLI (gh) not found. (exiting with error)"; exit 1 )
${GH} --version
export GH

check_mark=$("${GH}" auth status -t 2>&1 | grep "Token:" | awk '{print $1}') || true
if [[ "${check_mark}" != "" ]]; then
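
One aside on the gh detection added above (a sketch, not part of the commit): the parenthesized failure branch runs in a subshell, so its exit 1 ends only that subshell; brace grouping or a plain if keeps the exit in the calling script.

    # Fail-fast variant of the same detection, assuming the same fallback path.
    GH=$(command -v gh || echo "${HOME}/bin/gh")
    if [[ ! -x "${GH}" ]]; then
        echo "ERROR: GitHub CLI (gh) not found." >&2
        exit 1
    fi
    export GH
    "${GH}" --version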