
Commit cc617e3

Run PR gpu utests/relvals on both CUDA and ROCm GPUs
1 parent b3e6449 commit cc617e3

18 files changed: +153 -56 lines

cleanup-cmssdt
+1 -1

@@ -26,7 +26,7 @@ done
 
 RUNS_TO_KEEP=1
 #Keep $JOBS_TO_KEEP of these with RUNS_TO_KEEP
-for type in "" CUDA ROCM $(PYTHONPATH=${CMS_BOT_DIR} python3 -c 'from process_pr import EXTRA_RELVALS_TESTS;print(" ".join([x.upper().replace("-","_") for x in EXTRA_RELVALS_TESTS if x]))') ; do
+for type in "" $(cat "gpu_flavors.txt" | tr '[:lower:]' '[:upper:]' | tr '\n' ' ') $(PYTHONPATH=${CMS_BOT_DIR} python3 -c 'from process_pr import EXTRA_RELVALS_TESTS;print(" ".join([x.upper().replace("-","_") for x in EXTRA_RELVALS_TESTS if x]))') ; do
   dir=baseLineComparisons${type}
   [ -d ${JENKINS_ARTIFACTS}/$dir ] || continue
   DIRS_PROCESSED="${DIRS_PROCESSED} ${dir}"
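
Note: the cleanup loop no longer hard-codes CUDA/ROCM; the flavor list now comes from gpu_flavors.txt. A minimal stand-alone sketch of what the new command substitution expands to (run in a scratch directory; the printf simply recreates the two-line flavor file added by this commit):

printf 'cuda\nrocm\n' > gpu_flavors.txt
echo $(cat "gpu_flavors.txt" | tr '[:lower:]' '[:upper:]' | tr '\n' ' ')
# prints: CUDA ROCM
# so the loop visits baseLineComparisons, baseLineComparisonsCUDA, baseLineComparisonsROCM,
# plus one directory per EXTRA_RELVALS_TESTS entry reported by process_pr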

cmssw-pr-test-config
+8

@@ -1,4 +1,6 @@
 #!/bin/bash
+SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
+readarray -t ALL_GPU_TYPES < ${SCRIPTPATH}/gpu_flavors.txt
 CMSSW_VER=$CMSSW_VERSION
 [ "${CMSSW_VER}" != "" ] || CMSSW_VER=${RELEASE_FORMAT}
 CMSSW_MAJOR=0
@@ -20,6 +22,12 @@ elif [ "$CMSSW_VER" -ge 1300 ] ; then
 else
   PR_TEST_MATRIX_EXTRAS_GPU=11634.586,11634.587
 fi
+if [ X"$PR_TEST_MATRIX_EXTRAS_GPU" != X"" ]; then
+  for GPU_T in ${ALL_GPU_TYPES[@]} ; do
+    GPU_T_UC=$(echo ${GPU_T} | tr '[a-z]' '[A-Z]')
+    eval "PR_TEST_MATRIX_EXTRAS_${GPU_T_UC}=$PR_TEST_MATRIX_EXTRAS_GPU"
+  done
+fi
 PR_TEST_MATRIX_EXTRAS_PROFILING=29834.21,13034.21
 PR_TEST_MATRIX_EXTRAS_HIGH_STATS=35034.0
 PR_TEST_MATRIX_EXTRAS_NANO=all
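
Note: the added loop fans the common GPU workflow list out into one variable per flavor, so callers can read PR_TEST_MATRIX_EXTRAS_CUDA or PR_TEST_MATRIX_EXTRAS_ROCM. A hedged sketch of the effect, with the array and value filled in by hand:

ALL_GPU_TYPES=(cuda rocm)                       # what readarray loads from gpu_flavors.txt
PR_TEST_MATRIX_EXTRAS_GPU=11634.586,11634.587   # value from the else branch above
for GPU_T in ${ALL_GPU_TYPES[@]} ; do
  GPU_T_UC=$(echo ${GPU_T} | tr '[a-z]' '[A-Z]')
  eval "PR_TEST_MATRIX_EXTRAS_${GPU_T_UC}=$PR_TEST_MATRIX_EXTRAS_GPU"
done
echo "$PR_TEST_MATRIX_EXTRAS_CUDA"   # 11634.586,11634.587
echo "$PR_TEST_MATRIX_EXTRAS_ROCM"   # 11634.586,11634.587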

gpu_flavors.txt
+2

@@ -0,0 +1,2 @@
+cuda
+rocm

pr_testing/_helper_functions.sh
+14 -1

@@ -79,7 +79,7 @@ function prepare_upload_results (){
   else
     mkdir -p upload
   fi
-  for f in external_checks git-recent-commits.json cmssw.tar.gz unitTests gpuUnitTests dasqueries testsResults build-logs clang-logs runTheMatrix*-results llvm-analysis *.log *.html *.txt *.js DQMTestsResults valgrindResults-* cfg-viewerResults igprof-results-data git-merge-result git-log-recent-commits addOnTests codeRules dupDict material-budget cmsset_default; do
+  for f in external_checks git-recent-commits.json cmssw.tar.gz unitTests *UnitTests dasqueries testsResults build-logs clang-logs runTheMatrix*-results llvm-analysis *.log *.html *.txt *.js DQMTestsResults valgrindResults-* cfg-viewerResults igprof-results-data git-merge-result git-log-recent-commits addOnTests codeRules dupDict material-budget cmsset_default; do
     [ -e $f ] && mv $f upload/$f
   done
   if [ -e upload/renderPRTests.js ] ; then mkdir -p upload/js && mv upload/renderPRTests.js upload/js/ ; fi
@@ -120,3 +120,16 @@ function prepare_upload_results (){
   mkdir -p ${RESULTS_DIR}
   popd
 }
+
+function is_in_array() {
+  local value="$1"
+  shift
+  local array=("$@")
+
+  for item in "${array[@]}"; do
+    if [[ "$item" == "$value" ]]; then
+      return 0 # Found match
+    fi
+  done
+  return 1 # No match
+}
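
Note: a quick usage sketch of the new is_in_array helper (illustrative values; assumes a cms-bot checkout so the file can be sourced):

source pr_testing/_helper_functions.sh
flavors=(cuda rocm)
if is_in_array "rocm" "${flavors[@]}" ; then
  echo "rocm is a known GPU flavor"                                    # taken: exact match found
fi
is_in_array "nano" "${flavors[@]}" || echo "nano is not a GPU flavor"  # taken: helper returns 1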

pr_testing/run-pr-comparisons
+1 -1

@@ -264,7 +264,7 @@ for WF in ${WORKFLOWS_TO_COMPARE//,/ }; do
 done
 
 echo $WFS_WITH_DAS_INCONSISTENCY >> $WORKSPACE/$DAS_NON_CONSISTENT_WFS_FILE
-echo "COMPARISON${TEST_FLAVOR_STR};RUNNING,Comparison with the ${UC_TEST_FLAVOR} baseline,See results,See results" >> ${RESULTS_FILE}
+echo "COMPARISON${TEST_FLAVOR_STR};RUNNING,Comparison with the ${UC_TEST_FLAVOR} baseline,See results,/SDT/jenkins-artifacts/${COMP_UPLOAD_DIR}/" >> ${RESULTS_FILE}
 if [ "$DRY_RUN" = "" ] ; then
   send_jenkins_artifacts ${RESULTS_FILE} ${PR_BASELINE_JOBDIR}/testsResults/comparison${UC_TEST_FLAVOR}.txt
 fi

pr_testing/run-pr-relvals.sh
+6 -2

@@ -1,5 +1,7 @@
 #!/bin/bash -ex
 source $(dirname $0)/setup-pr-test-env.sh
+readarray -t ALL_GPU_TYPES < ${CMS_BOT_DIR}/gpu_flavors.txt
+
 GH_CONTEXT="relvals"
 GH_COMP_CONTEXT="comparison"
 UC_TEST_FLAVOR=$(echo ${TEST_FLAVOR} | tr '[a-z]' '[A-Z]')
@@ -17,10 +19,12 @@ echo "${MATRIX_ARGS}" | tr ';' '\n' | while IFS= read -r args; do
   if [ $(echo "${args}" | sed 's|.*-l ||;s| .*||' | tr ',' '\n' | grep '^all$' | wc -l) -gt 0 ] ; then
     OPTS=""
     case "${TEST_FLAVOR}" in
-      gpu ) OPTS="-w gpu" ;;
       high_stats ) ;;
       nano ) OPTS="-w nano" ;;
-      * ) ;;
+      * ) if is_in_array "${TEST_FLAVOR}" "${ALL_GPU_TYPES[@]}" ; then
+            OPTS="-w gpu"
+          fi
+          ;;
     esac
     ALL_WFS=$(runTheMatrix.py -n ${OPTS} ${args} | grep -v ' workflows ' | grep '^[1-9][0-9]*\(.[0-9][0-9]*\|\)\s' | sed 's| .*||' | tr '\n' ',' | sed 's|,$||')
     args=$(echo "${args}" | sed "s|all|${ALL_WFS}|")
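
Note: the default case now routes any flavor listed in gpu_flavors.txt to the GPU workflow set. A hedged sketch of the flavor-to-options mapping, factored into a hypothetical helper for illustration (resolve_matrix_opts is not part of the commit; is_in_array comes from _helper_functions.sh):

resolve_matrix_opts() {          # mirrors the case statement above
  local flavor="$1"; shift
  local gpu_types=("$@")
  case "${flavor}" in
    high_stats ) ;;
    nano ) echo "-w nano" ;;
    * ) if is_in_array "${flavor}" "${gpu_types[@]}" ; then echo "-w gpu" ; fi ;;
  esac
}
resolve_matrix_opts rocm cuda rocm         # prints: -w gpu
resolve_matrix_opts nano cuda rocm         # prints: -w nano
resolve_matrix_opts high_stats cuda rocm   # prints nothing (no extra option)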

pr_testing/run-pr-unittests.sh
+28 -26

@@ -10,57 +10,59 @@ cd $WORKSPACE/${CMSSW_VERSION}
 CMSSW_PKG_COUNT=$(ls -d $LOCALRT/src/*/* | wc -l)
 REPORT_OPTS="--report-url ${PR_RESULT_URL} $NO_POST"
 
-rm -f ${RESULTS_DIR}/unittestGPU.txt
-mark_commit_status_all_prs 'unittests/gpu' 'pending' -u "${BUILD_URL}" -d "Running tests" || true
+rm -f ${RESULTS_DIR}/unittest${TEST_FLAVOR}.txt
+mark_commit_status_all_prs "unittests/${TEST_FLAVOR}" 'pending' -u "${BUILD_URL}" -d "Running tests" || true
 echo '--------------------------------------'
-mkdir -p $WORKSPACE/gpuUnitTests
+mkdir -p $WORKSPACE/${TEST_FLAVOR}UnitTests
 let UT_TIMEOUT=7200+${CMSSW_PKG_COUNT}*20
-UTESTS_CMD="USER_UNIT_TESTS=cuda timeout ${UT_TIMEOUT} scram b -v -k -j ${NCPU} unittests "
+gpu_t_lc=$(echo ${TEST_FLAVOR} | tr '[A-Z]' '[a-z]')
+UTESTS_CMD="USER_UNIT_TESTS=${gpu_t_lc} timeout ${UT_TIMEOUT} scram b -v -k -j ${NCPU} unittests "
 echo "LD_LIBRARY_PATH: ${LD_LIBRARY_PATH}"
 scram build echo_LD_LIBRARY_PATH || true
 scram build -r echo_CXX || true
 cms_major=$(echo ${CMSSW_IB} | cut -d_ -f2)
 cms_minor=$(echo ${CMSSW_IB} | cut -d_ -f3)
 cms_ver="$(echo 00${cms_major} | sed -E 's|^.*(..)$|\1|')$(echo 00${cms_minor} | sed -E 's|^.*(..)$|\1|')"
-echo $UTESTS_CMD > $WORKSPACE/gpuUnitTests/log.txt
-(eval $UTESTS_CMD && echo 'ALL_OK') > $WORKSPACE/gpuUnitTests/log.txt 2>&1 || true
+echo $UTESTS_CMD > $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt
+(eval $UTESTS_CMD && echo 'ALL_OK') > $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt 2>&1 || true
 echo 'END OF UNIT TESTS'
 echo '--------------------------------------'
 
-TEST_ERRORS=$(grep -ai 'had errors\|recipe for target' $WORKSPACE/gpuUnitTests/log.txt | sed "s|'||g;s|.*recipe for target *||;s|.*unittests_|---> test |;s| failed$| timeout|" || true)
-TEST_ERRORS=`grep -ai "had errors" $WORKSPACE/gpuUnitTests/log.txt` || true
-GENERAL_ERRORS=`grep -a "ALL_OK" $WORKSPACE/gpuUnitTests/log.txt` || true
+TEST_ERRORS=$(grep -ai 'had errors\|recipe for target' $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt | sed "s|'||g;s|.*recipe for target *||;s|.*unittests_|---> test |;s| failed$| timeout|" || true)
+TEST_ERRORS=`grep -ai "had errors" $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt` || true
+GENERAL_ERRORS=`grep -a "ALL_OK" $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt` || true
 
+TEST_FLAVOR_UC=$(echo $TEST_FLAVOR | tr '[:lower:]' '[:upper:]')
 if [ "X$TEST_ERRORS" != "X" -o "X$GENERAL_ERRORS" = "X" ]; then
-  echo "Errors in the gpu unit tests"
-  echo 'GPU_UNIT_TEST_RESULTS;ERROR,GPU Unit Tests,See Log,gpuUnitTests' >> ${RESULTS_DIR}/unittestGPU.txt
+  echo "Errors in the ${TEST_FLAVOR} unit tests"
+  echo "${TEST_FLAVOR_UC}_UNIT_TEST_RESULTS;ERROR,Unit Tests ${TEST_FLAVOR_UC},See Log,${TEST_FLAVOR}UnitTests" >> ${RESULTS_DIR}/unittest${TEST_FLAVOR}.txt
   ALL_OK=false
   UNIT_TESTS_OK=false
-  $CMS_BOT_DIR/report-pull-request-results PARSE_GPU_UNIT_TESTS_FAIL -f $WORKSPACE/gpuUnitTests/log.txt --report-file ${RESULTS_DIR}/14-unittestGPU-report.res ${REPORT_OPTS}
-  echo "GpuUnitTests" > ${RESULTS_DIR}/14-failed.res
+  $CMS_BOT_DIR/report-pull-request-results PARSE_${TEST_FLAVOR_UC}_UNIT_TESTS_FAIL -f $WORKSPACE/${TEST_FLAVOR}UnitTests/log.txt --report-file ${RESULTS_DIR}/14-unittest${TEST_FLAVOR}-report.res ${REPORT_OPTS}
+  echo "${TEST_FLAVOR}UnitTests" > ${RESULTS_DIR}/14-${TEST_FLAVOR}-failed.res
 else
-  echo 'GPU_UNIT_TEST_RESULTS;OK,GPU Unit Tests,See Log,gpuUnitTests' >> ${RESULTS_DIR}/unittestGPU.txt
+  echo "${TEST_FLAVOR_UC}_UNIT_TEST_RESULTS;OK,Unit Tests ${TEST_FLAVOR_UC},See Log,${TEST_FLAVOR}UnitTests" >> ${RESULTS_DIR}/unittest${TEST_FLAVOR}.txt
 fi
-echo "<html><head></head><body>" > $WORKSPACE/gpuUnitTests/success.html
-cp $WORKSPACE/gpuUnitTests/success.html $WORKSPACE/gpuUnitTests/failed.html
+echo "<html><head></head><body>" > $WORKSPACE/${TEST_FLAVOR}UnitTests/success.html
+cp $WORKSPACE/${TEST_FLAVOR}UnitTests/success.html $WORKSPACE/${TEST_FLAVOR}UnitTests/failed.html
 UT_ERR=false
 utlog="testing.log"
 for t in $(find $WORKSPACE/$CMSSW_IB/tmp/${SCRAM_ARCH}/src -name ${utlog} -type f | sed "s|$WORKSPACE/$CMSSW_IB/tmp/${SCRAM_ARCH}/||;s|/${utlog}$||") ; do
-  mkdir -p $WORKSPACE/gpuUnitTests/${t}
-  mv $WORKSPACE/$CMSSW_IB/tmp/${SCRAM_ARCH}/${t}/${utlog} $WORKSPACE/gpuUnitTests/${t}/
-  if [ $(grep -a '^\-\-\-> test *[^ ]* *succeeded$' $WORKSPACE/gpuUnitTests/${t}/${utlog} | wc -l) -gt 0 ] ; then
-    echo "<a href='${t}/${utlog}'>${t}</a><br/>" >> $WORKSPACE/gpuUnitTests/success.html
+  mkdir -p $WORKSPACE/${TEST_FLAVOR}UnitTests/${t}
+  mv $WORKSPACE/$CMSSW_IB/tmp/${SCRAM_ARCH}/${t}/${utlog} $WORKSPACE/${TEST_FLAVOR}UnitTests/${t}/
+  if [ $(grep -a '^\-\-\-> test *[^ ]* *succeeded$' $WORKSPACE/${TEST_FLAVOR}UnitTests/${t}/${utlog} | wc -l) -gt 0 ] ; then
+    echo "<a href='${t}/${utlog}'>${t}</a><br/>" >> $WORKSPACE/${TEST_FLAVOR}UnitTests/success.html
   else
-    echo "<a href='${t}/${utlog}'>${t}</a><br/>" >> $WORKSPACE/gpuUnitTests/failed.html
+    echo "<a href='${t}/${utlog}'>${t}</a><br/>" >> $WORKSPACE/${TEST_FLAVOR}UnitTests/failed.html
     UT_ERR=true
   fi
 done
-if ! $UT_ERR ; then echo "No unit test failed" >> $WORKSPACE/gpuUnitTests/failed.html ; fi
-echo "</body></html>" >> $WORKSPACE/gpuUnitTests/success.html
-echo "</body></html>" >> $WORKSPACE/gpuUnitTests/failed.html
+if ! $UT_ERR ; then echo "No unit test failed" >> $WORKSPACE/${TEST_FLAVOR}UnitTests/failed.html ; fi
+echo "</body></html>" >> $WORKSPACE/${TEST_FLAVOR}UnitTests/success.html
+echo "</body></html>" >> $WORKSPACE/${TEST_FLAVOR}UnitTests/failed.html
 prepare_upload_results
 if $UNIT_TESTS_OK ; then
-  mark_commit_status_all_prs 'unittests/gpu' 'success' -u "${BUILD_URL}" -d "Passed"
+  mark_commit_status_all_prs "unittests/${TEST_FLAVOR}" 'success' -u "${BUILD_URL}" -d "Passed"
 else
-  mark_commit_status_all_prs 'unittests/gpu' 'error' -u "${BUILD_URL}" -d "Some unit tests were failed."
+  mark_commit_status_all_prs "unittests/${TEST_FLAVOR}" 'error' -u "${BUILD_URL}" -d "Some unit tests were failed."
 fi
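
Note: the script is now parameterized entirely by TEST_FLAVOR; every name that used to say gpu/GPU is derived from it. A short illustration of the names produced for the two flavors shipped in gpu_flavors.txt:

for TEST_FLAVOR in cuda rocm ; do
  TEST_FLAVOR_UC=$(echo $TEST_FLAVOR | tr '[:lower:]' '[:upper:]')
  echo "log dir:        ${TEST_FLAVOR}UnitTests"                  # cudaUnitTests / rocmUnitTests
  echo "results file:   unittest${TEST_FLAVOR}.txt"
  echo "scram selector: USER_UNIT_TESTS=${TEST_FLAVOR}"           # value passed to scram unittests
  echo "report action:  PARSE_${TEST_FLAVOR_UC}_UNIT_TESTS_FAIL"
  echo "commit status:  unittests/${TEST_FLAVOR}"
done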

pr_testing/test_multiple_prs.sh
+28 -8

@@ -79,20 +79,21 @@ function process_changed_files() {
   sort -u "$directlyChangedFiles" $WORKSPACE/indirectly-changed-files.txt > "$allChangedFiles"
 }
 
+SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )" # Absolute path to script
+CMS_BOT_DIR=$(dirname ${SCRIPTPATH}) # To get CMS_BOT dir path
+
 # Constants
 echo LD_LIBRARY_PATH=${LD_LIBRARY_PATH} || true
 ls ${LD_LIBRARY_PATH} || true
-SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )" # Absolute path to script
-CMS_BOT_DIR=$(dirname ${SCRIPTPATH}) # To get CMS_BOT dir path
 export SCRAM_PREFIX_PATH=${CMS_BOT_DIR}/das-utils
 source ${CMS_BOT_DIR}/cmsrep.sh
 CACHED=${WORKSPACE}/CACHED # Where cached PR metada etc are kept
 PR_TESTING_DIR=${CMS_BOT_DIR}/pr_testing
 COMMON=${CMS_BOT_DIR}/common
 CONFIG_MAP=$CMS_BOT_DIR/config.map
 [ "${USE_IB_TAG}" != "true" ] && export USE_IB_TAG=false
-[ "${EXTRA_RELVALS_TESTS}" = "" ] && EXTRA_RELVALS_TESTS="GPU THREADING HIGH_STATS NANO"
-EXTRA_RELVALS_TESTS=$(echo ${EXTRA_RELVALS_TESTS} | tr ' ' '\n' | grep -v THREADING | tr '\n' ' ')
+[ "${EXTRA_RELVALS_TESTS}" = "" ] && EXTRA_RELVALS_TESTS="THREADING HIGH_STATS NANO $(echo ${ALL_GPU_TYPES[@]} | tr '[a-z]' '[A-Z]')"
+EXTRA_RELVALS_TESTS=$(echo ${EXTRA_RELVALS_TESTS} | tr ' ' '\n' | grep -v THREADING | grep -v GPU | tr '\n' ' ')
 # ---
 # doc: Input variable
 # PULL_REQUESTS # "cms-sw/cmsdist#4488,cms-sw/cmsdist#4480,cms-sw/cmsdist#4479,cms-sw/root#116"
@@ -163,6 +164,20 @@ if [ $(echo "${CONFIG_LINE}" | grep "PROD_ARCH=1" | wc -l) -gt 0 ] ; then
   fi
 fi
 
+readarray -t ALL_GPU_TYPES < ${CMS_BOT_DIR}/gpu_flavors.txt
+
+declare -a ENABLE_GPU_FLAVORS
+for ex_type in ${ENABLE_BOT_TESTS} ; do
+  ex_type_lc=$(echo $ex_type | tr '[A-Z]' '[a-z]')
+  if is_in_array "$ex_type_lc" "${ALL_GPU_TYPES[@]}" ; then
+    ENABLE_GPU_FLAVORS+=( $ex_type )
+    VAR_NAME="MATRIX_EXTRAS_${ex_type}"
+    if [ -z "${!VAR_NAME}" ]; then
+      eval "$VAR_NAME=${MATRIX_EXTRAS_GPU}"
+    fi
+  fi
+done
+
 # ----------
 # -- MAIN --
 # ----------
@@ -1324,9 +1339,8 @@ if [ "X$BUILD_OK" = Xtrue -a "$RUN_TESTS" = "true" ]; then
     done
   fi
 fi
-if [ $(echo ${ENABLE_BOT_TESTS} | tr ',' ' ' | tr ' ' '\n' | grep '^GPU$' | wc -l) -gt 0 -a X"${DISABLE_GPU_TESTS}" != X"true" ] ; then
+if [ ${#ENABLE_GPU_FLAVORS[@]} -ne 0 -a X"${DISABLE_GPU_TESTS}" != X"true" ] ; then
   DO_GPU_TESTS=true
-  mark_commit_status_all_prs 'unittests/gpu' 'pending' -u "${BUILD_URL}" -d "Waiting for tests to start"
 fi
 if [ $(echo ${ENABLE_BOT_TESTS} | tr ',' ' ' | tr ' ' '\n' | grep '^HLT_P2_TIMING$' | wc -l) -gt 0 ] ; then
   if [ $(echo ${ARCHITECTURE} | grep "_amd64_" | wc -l) -gt 0 ] ; then
@@ -1499,7 +1513,12 @@ if [ "X$DO_ADDON_TESTS" = Xtrue ]; then
 fi
 
 if [ "X$DO_GPU_TESTS" = Xtrue ]; then
-  cp $WORKSPACE/test-env.txt $WORKSPACE/run-unittests.prop
+  for GPU_T in ${ENABLE_GPU_FLAVORS[@]}; do
+    GPU_T_LC=$(echo $GPU_T | tr '[A-Z]' '[a-z]')
+    cp $WORKSPACE/test-env.txt $WORKSPACE/run-unittests-${GPU_T_LC}.prop
+    echo "TEST_FLAVOR=${GPU_T_LC}" >> $WORKSPACE/run-unittests-${GPU_T_LC}.prop
+    mark_commit_status_all_prs "unittests/${GPU_T_LC}" 'pending' -u "${BUILD_URL}" -d "Waiting for tests to start"
+  done
 fi
 
 if ${BUILD_EXTERNAL} ; then
@@ -1510,7 +1529,7 @@ fi
 
 if [ "${DO_PROFILING}" = "true" ] ; then
   PROFILING_WORKFLOWS=$($CMS_BOT_DIR/cmssw-pr-test-config _PROFILING | tr ',' ' ')
-  for wf in ${PROFILING_WORKFLOWS};do
+  for wf in ${PROFILING_WORKFLOWS}; do
     cp $WORKSPACE/test-env.txt $WORKSPACE/run-profiling-$wf.prop
     echo "PROFILING_WORKFLOWS=${wf}" >> $WORKSPACE/run-profiling-$wf.prop
   done
@@ -1525,3 +1544,4 @@ if [ "${DO_HLT_P2_INTEGRATION}" = "true" ] ; then
 fi
 
 rm -f $WORKSPACE/test-env.txt
+
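
Note: the driver now emits one Jenkins property file per enabled GPU flavor instead of a single run-unittests.prop, and marks a separate unittests/<flavor> commit status for each. A hedged sketch of the files produced when both flavors are enabled (scratch paths only; the real loop also calls mark_commit_status_all_prs, which is omitted here):

WORKSPACE=$(mktemp -d)                        # hypothetical scratch workspace
echo "DUMMY_ENV=1" > $WORKSPACE/test-env.txt  # stand-in for the real test environment
ENABLE_GPU_FLAVORS=(CUDA ROCM)
for GPU_T in ${ENABLE_GPU_FLAVORS[@]}; do
  GPU_T_LC=$(echo $GPU_T | tr '[A-Z]' '[a-z]')
  cp $WORKSPACE/test-env.txt $WORKSPACE/run-unittests-${GPU_T_LC}.prop
  echo "TEST_FLAVOR=${GPU_T_LC}" >> $WORKSPACE/run-unittests-${GPU_T_LC}.prop
done
ls $WORKSPACE/run-unittests-*.prop      # run-unittests-cuda.prop run-unittests-rocm.prop
cat $WORKSPACE/run-unittests-rocm.prop  # DUMMY_ENV=1, then TEST_FLAVOR=rocm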
process_pr.py
+22 -3

@@ -143,7 +143,10 @@ def format(s, **kwds):
 REGEX_IGNORE_FILE_COUNT = r"\+file-count"
 TEST_WAIT_GAP = 720
 ALL_CHECK_FUNCTIONS = None
-EXTRA_RELVALS_TESTS = ["threading", "gpu", "high-stats", "nano"]
+ALL_GPU_FLAVORS = [
+    x.strip() for x in open(join(dirname(__file__), "gpu_flavors.txt"), "r").read().splitlines()
+]
+EXTRA_RELVALS_TESTS = ["threading", "gpu", "high-stats", "nano"] + ALL_GPU_FLAVORS
 EXTRA_RELVALS_TESTS_OPTS = "_" + "|_".join(EXTRA_RELVALS_TESTS)
 EXTRA_TESTS = (
     "|".join(EXTRA_RELVALS_TESTS)
@@ -168,7 +171,7 @@ def format(s, **kwds):
     "disable_poison": ["true|false", "DISABLE_POISON"],
     "use_ib_tag": ["true|false", "USE_IB_TAG"],
     "baseline": ["self|default", "USE_BASELINE"],
-    "set_env": ["[A-Z][A-Z0-9_]+(\s*,\s*[A-Z][A-Z0-9_]+|)*", "CMSBOT_SET_ENV"],
+    "set_env": [r"[A-Z][A-Z0-9_]+(\s*,\s*[A-Z][A-Z0-9_]+|)*", "CMSBOT_SET_ENV"],
     "skip_test(s|)": [format(r"(%(tests)s)(\s*,\s*(%(tests)s))*", tests=SKIP_TESTS), "SKIP_TESTS"],
     "dry_run": ["true|false", "DRY_RUN"],
     "jenkins_(slave|node)": [JENKINS_NODES, "RUN_ON_SLAVE"],
@@ -1315,7 +1318,7 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F
 elif re.match(REGEX_EX_ENABLE_TESTS, first_line, re.I):
     comment_emoji = "-1"
     if valid_commenter:
-        enable_tests, ignore = check_enable_bot_tests(first_line.split(" ", 1)[-1])
+        enable_tests, _ = check_enable_bot_tests(first_line.split(" ", 1)[-1])
         comment_emoji = "+1"
 elif re.match(r"^allow\s+@([^ ]+)\s+test\s+rights$", first_line, re.I):
     comment_emoji = "-1"
@@ -1375,6 +1378,7 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F
     first_line, comment_lines, repository
 )
 if test_params_m:
+    # Error in parameters
     test_params_msg = str(comment.id) + ":" + test_params_m
     test_params_comment = comment
     continue
@@ -1589,6 +1593,21 @@ def process_pr(repo_config, gh, repo, issue, dryRun, cmsbuild_user=None, force=F
     set_comment_emoji_cache(dryRun, bot_cache, comment, repository)
 
 # end of parsing comments section
+
+# Extract enabled GPU flavors and remove them from enable_tests
+new_enable_tests = []
+enabled_gpu_flavors = set()
+for test in enable_tests.split():
+    if test == "GPU":
+        enabled_gpu_flavors.update([x.upper() for x in ALL_GPU_FLAVORS])
+    elif test.lower() in ALL_GPU_FLAVORS:
+        enabled_gpu_flavors.add(test)
+    else:
+        new_enable_tests.append(test)
+
+new_enable_tests.extend(list(enabled_gpu_flavors))
+enable_tests = " ".join(new_enable_tests)
+
 # Check if it needs to be automatically closed.
 if mustClose:
     if issue.state == "open":

report-pull-request-results.py
+9 -3

@@ -411,7 +411,7 @@ def read_unit_tests_file(unit_tests_file):
     send_message_pr(message)
 
 
-def read_gpu_tests_file(unit_tests_file):
+def read_gpu_tests_file(unit_tests_file, gpu_flavor="GPU"):
     errors_found = ""
     err_cnt = 0
     for line in openlog(unit_tests_file):
@@ -423,8 +423,8 @@ def read_gpu_tests_file(unit_tests_file):
             continue
         errors_found += line
     message = (
-        "\n## GPU Unit Tests\n\nI found %s errors in the following unit tests:\n\n<pre>%s</pre>"
-        % (err_cnt, errors_found)
+        "\n## %s Unit Tests\n\nI found %s errors in the following unit tests:\n\n<pre>%s</pre>"
+        % (gpu_flavor, err_cnt, errors_found)
     )
     send_message_pr(message)
 
@@ -606,6 +606,8 @@ def complain_missing_param(param_name):
 GITLOG_FILE_BASE_URL = "%s/git-recent-commits.json" % options.report_url
 GIT_CMS_MERGE_TOPIC_BASE_URL = "%s/git-merge-result" % options.report_url
 
+ACTION = ACTION.upper()
+
 if ACTION == "GET_BASE_MESSAGE":
     get_base_message()
 elif ACTION == "PARSE_UNIT_TESTS_FAIL":
@@ -630,6 +632,10 @@ def complain_missing_param(param_name):
     read_material_budget_log_file(options.unit_tests_file)
 elif ACTION == "MERGE_COMMITS":
     add_to_report(get_recent_merges_message())
+elif ACTION == "PARSE_CUDA_UNIT_TESTS_FAIL":
+    read_gpu_tests_file(options.unit_tests_file, "CUDA")
+elif ACTION == "PARSE_ROCM_UNIT_TESTS_FAIL":
+    read_gpu_tests_file(options.unit_tests_file, "ROCm")
 elif ACTION == "PARSE_GPU_UNIT_TESTS_FAIL":
     read_gpu_tests_file(options.unit_tests_file)
 else:
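
Note: ACTION is now upper-cased before dispatch, and the per-flavor actions built by run-pr-unittests.sh land on read_gpu_tests_file with the flavor name. A hedged example of the call issued when CUDA unit tests fail (the environment variables and paths are placeholders taken from the unit-test script):

$CMS_BOT_DIR/report-pull-request-results PARSE_CUDA_UNIT_TESTS_FAIL \
    -f $WORKSPACE/cudaUnitTests/log.txt \
    --report-file ${RESULTS_DIR}/14-unittestcuda-report.res ${REPORT_OPTS}
# posts a "## CUDA Unit Tests" section to the PR listing the failing tests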
