#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
SELF=$(cd "$(dirname "$0")" && pwd)
. "$SELF/release-util.sh"
function exit_with_usage {
cat << EOF
usage: release-build.sh <package|docs|publish-snapshot|publish-release>
Creates build deliverables from a Spark commit.
Top-level targets are:
package: Create binary packages and commit them to dist.apache.org/repos/dist/dev/spark/
docs: Build docs and commit them to dist.apache.org/repos/dist/dev/spark/
publish-snapshot: Publish snapshot release to Apache snapshots
publish-release: Publish a release to Apache release repo
All other inputs are environment variables:
GIT_REF - Release tag or commit to build from
SPARK_PACKAGE_VERSION - Release identifier in top level package directory (e.g. 2.1.2-rc1)
SPARK_VERSION - (optional) Version of Spark being built (e.g. 2.1.2)
ASF_USERNAME - Username of ASF committer account
ASF_PASSWORD - Password of ASF committer account
GPG_KEY - GPG key used to sign release artifacts
GPG_PASSPHRASE - Passphrase for GPG key
EOF
exit 1
}
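# Example invocation (illustrative values only; substitute real credentials and ref):
#   ASF_USERNAME=committer GPG_KEY=ABCD1234 GIT_REF=v2.4.5 ./release-build.sh package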
set -e
if [ $# -eq 0 ]; then
exit_with_usage
fi
if [[ "$*" == *"help"* ]]; then
exit_with_usage
fi
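# Prompt interactively for any secrets that were not supplied via the environment;
# 'stty -echo' disables terminal echo so the value is not displayed while typing.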
if [[ -z "$ASF_PASSWORD" ]]; then
echo 'The environment variable ASF_PASSWORD is not set. Enter the password.'
echo
stty -echo && printf "ASF password: " && read ASF_PASSWORD && printf '\n' && stty echo
fi
if [[ -z "$GPG_PASSPHRASE" ]]; then
echo 'The environment variable GPG_PASSPHRASE is not set. Enter the passphrase to'
echo 'unlock the GPG signing key that will be used to sign the release!'
echo
stty -echo && printf "GPG passphrase: " && read GPG_PASSPHRASE && printf '\n' && stty echo
fi
for env in ASF_USERNAME GPG_PASSPHRASE GPG_KEY; do
if [ -z "${!env}" ]; then
echo "ERROR: $env must be set to run this script"
exit_with_usage
fi
done
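# Use a UTF-8 C locale so text-processing tools behave consistently across machines.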
export LC_ALL=C.UTF-8
export LANG=C.UTF-8
# Commit ref to checkout when building
GIT_REF=${GIT_REF:-master}
RELEASE_STAGING_LOCATION="https://dist.apache.org/repos/dist/dev/spark"
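# Base GPG command: sign with GPG_KEY, non-interactively; each call site feeds the
# passphrase on stdin via '--passphrase-fd 0'.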
GPG="gpg -u $GPG_KEY --no-tty --batch --pinentry-mode loopback"
NEXUS_ROOT=https://jfrog.fkinternal.com/artifactory/maven_virtual/
NEXUS_PROFILE=d63f592e7eac0 # Profile for Spark staging uploads
BASE_DIR=$(pwd)
init_java
init_maven_sbt
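# Start from a fresh clone of the Spark repo (ASF_REPO is expected to be set by
# release-util.sh) and check out the requested ref.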
rm -rf spark
git clone "$ASF_REPO"
cd spark
git checkout $GIT_REF
git_hash=$(git rev-parse --short HEAD)
echo "Checked out Spark git hash $git_hash"
if [ -z "$SPARK_VERSION" ]; then
# Run $MVN in a separate command so that 'set -e' does the right thing.
TMP=$(mktemp)
$MVN help:evaluate -Dexpression=project.version > "$TMP"
SPARK_VERSION=$(grep -v INFO "$TMP" | grep -v WARNING | grep -vi Download)
rm "$TMP"
fi
# Depending on the version being built, certain extra profiles need to be activated, and
# different versions of Scala are supported.
BASE_PROFILES="-Pmesos -Pyarn"
if [[ $SPARK_VERSION > "2.3" ]]; then
BASE_PROFILES="$BASE_PROFILES -Pkubernetes"
fi
# TODO: revisit for Scala 2.13
PUBLISH_SCALA_2_11=1
SCALA_2_11_PROFILES="-Pscala-2.11"
if [[ $SPARK_VERSION > "2.3" ]]; then
if [[ $SPARK_VERSION < "3.0." ]]; then
SCALA_2_11_PROFILES="-Pkafka-0-8 -Pflume $SCALA_2_11_PROFILES"
else
PUBLISH_SCALA_2_11=0
fi
fi
PUBLISH_SCALA_2_12=0
SCALA_2_12_PROFILES="-Pscala-2.12"
if [[ $SPARK_VERSION < "3.0." ]]; then
SCALA_2_12_PROFILES="-Pscala-2.12 -Pflume"
fi
if [[ $SPARK_VERSION > "2.4" ]]; then
PUBLISH_SCALA_2_12=1
fi
# Hive-specific profiles for some builds
HIVE_PROFILES="-Phive -Phive-thriftserver"
# Profiles for publishing snapshots and release to Maven Central
# We use Apache Hive 2.3 for publishing
PUBLISH_PROFILES="$BASE_PROFILES $HIVE_PROFILES -Phive-2.3 -Pspark-ganglia-lgpl -Pkinesis-asl"
# Profiles for building binary releases
BASE_RELEASE_PROFILES="$BASE_PROFILES -Psparkr"
if [[ $JAVA_VERSION < "1.8." ]]; then
echo "Java version $JAVA_VERSION is less than required 1.8 for 2.2+"
echo "Please set JAVA_HOME correctly."
exit 1
fi
# This is a band-aid fix to avoid the failure of Maven nightly snapshot in some Jenkins
# machines by explicitly calling /usr/sbin/lsof. Please see SPARK-22377 and the discussion
# in its pull request.
LSOF=lsof
if ! hash $LSOF 2>/dev/null; then
LSOF=/usr/sbin/lsof
fi
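# Default the package version to <version>-<timestamp>-<git hash>,
# e.g. 2.4.5-2020_01_01_12_00-abc1234.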
if [ -z "$SPARK_PACKAGE_VERSION" ]; then
SPARK_PACKAGE_VERSION="${SPARK_VERSION}-$(date +%Y_%m_%d_%H_%M)-${git_hash}"
fi
DEST_DIR_NAME="$SPARK_PACKAGE_VERSION"
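# Scrub untracked and ignored files so the source tarball contains a pristine tree.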
git clean -d -f -x
rm -f .gitignore
cd ..
export MAVEN_OPTS="-Xmx12g"
if [[ "$1" == "package" ]]; then
# Source and binary tarballs
echo "Packaging release source tarballs"
cp -r spark spark-$SPARK_VERSION
# For source release in v2.4+, exclude copy of binary license/notice
if [[ $SPARK_VERSION > "2.4" ]]; then
rm -f spark-$SPARK_VERSION/LICENSE-binary
rm -f spark-$SPARK_VERSION/NOTICE-binary
rm -rf spark-$SPARK_VERSION/licenses-binary
fi
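# Create the source tarball (excluding .git), then produce a detached ASCII-armored
# signature and a SHA-512 checksum alongside it.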
tar cvzf spark-$SPARK_VERSION.tgz --exclude spark-$SPARK_VERSION/.git spark-$SPARK_VERSION
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour --output spark-$SPARK_VERSION.tgz.asc \
--detach-sig spark-$SPARK_VERSION.tgz
shasum -a 512 spark-$SPARK_VERSION.tgz > spark-$SPARK_VERSION.tgz.sha512
rm -rf spark-$SPARK_VERSION
# Starting Zinc port; incremented for each binary build
ZINC_PORT=3035
make_binary_release() {
NAME=$1
FLAGS="$MVN_EXTRA_OPTS -B $BASE_RELEASE_PROFILES $2"
# BUILD_PACKAGE can be "withpip", "withr", or both as "withpip,withr"
BUILD_PACKAGE=$3
SCALA_VERSION=$4
PIP_FLAG=""
if [[ $BUILD_PACKAGE == *"withpip"* ]]; then
PIP_FLAG="--pip"
fi
R_FLAG=""
if [[ $BUILD_PACKAGE == *"withr"* ]]; then
R_FLAG="--r"
fi
# We increment the Zinc port each time to avoid OOM's and other craziness if multiple builds
# share the same Zinc server.
ZINC_PORT=$((ZINC_PORT + 1))
echo "Building binary dist $NAME"
cp -r spark spark-$SPARK_VERSION-bin-$NAME
cd spark-$SPARK_VERSION-bin-$NAME
./dev/change-scala-version.sh $SCALA_VERSION
export ZINC_PORT=$ZINC_PORT
echo "Creating distribution: $NAME ($FLAGS)"
# Write the version out to the PySpark version info, rewriting '-' to '.' and
# SNAPSHOT to dev0 to be closer to PEP 440.
PYSPARK_VERSION=$(echo "$SPARK_VERSION" | sed -e "s/-/./" -e "s/SNAPSHOT/dev0/" -e "s/preview/dev/")
echo "__version__='$PYSPARK_VERSION'" > python/pyspark/version.py
# Get the Maven home directory from the MVN in use
MVN_HOME=$($MVN -version 2>&1 | grep 'Maven home' | awk '{print $NF}')
echo "Creating distribution"
./dev/make-distribution.sh --name $NAME --mvn $MVN_HOME/bin/mvn --tgz \
$PIP_FLAG $R_FLAG $FLAGS \
-DzincPort=$ZINC_PORT > ../binary-release-$NAME.log 2>&1
cd ..
if [[ -n $R_FLAG ]]; then
echo "Copying and signing R source package"
R_DIST_NAME=SparkR_$SPARK_VERSION.tar.gz
cp spark-$SPARK_VERSION-bin-$NAME/R/$R_DIST_NAME .
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
--output $R_DIST_NAME.asc \
--detach-sig $R_DIST_NAME
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
SHA512 $R_DIST_NAME > \
$R_DIST_NAME.sha512
fi
if [[ -n $PIP_FLAG ]]; then
echo "Copying and signing python distribution"
PYTHON_DIST_NAME=pyspark-$PYSPARK_VERSION.tar.gz
cp spark-$SPARK_VERSION-bin-$NAME/python/dist/$PYTHON_DIST_NAME .
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
--output $PYTHON_DIST_NAME.asc \
--detach-sig $PYTHON_DIST_NAME
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
SHA512 $PYTHON_DIST_NAME > \
$PYTHON_DIST_NAME.sha512
fi
echo "Copying and signing regular binary distribution"
cp spark-$SPARK_VERSION-bin-$NAME/spark-$SPARK_VERSION-bin-$NAME.tgz .
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --armour \
--output spark-$SPARK_VERSION-bin-$NAME.tgz.asc \
--detach-sig spark-$SPARK_VERSION-bin-$NAME.tgz
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --print-md \
SHA512 spark-$SPARK_VERSION-bin-$NAME.tgz > \
spark-$SPARK_VERSION-bin-$NAME.tgz.sha512
}
# List of binary packages to build. Two associative arrays are populated: the key is the
# "name" of the package being built, and the values are, respectively, the Maven arguments
# needed to build the package and any extra packages needed for that particular combination.
#
# In dry run mode, only build the first one. The keys in BINARY_PKGS_ARGS are used as the
# list of packages to be built, so it's ok for things to be missing in BINARY_PKGS_EXTRA.
# NOTE: Don't forget to update the valid combinations of distributions at
# 'python/pyspark/install.py' and 'python/docs/source/getting_started/install.rst'
# if you're changing them.
declare -A BINARY_PKGS_ARGS
BINARY_PKGS_ARGS["hadoop3.2"]="-Phadoop-3.2 $HIVE_PROFILES"
if ! is_dry_run; then
BINARY_PKGS_ARGS["without-hadoop"]="-Phadoop-provided"
if [[ $SPARK_VERSION < "3.0." ]]; then
BINARY_PKGS_ARGS["hadoop2.6"]="-Phadoop-2.6 $HIVE_PROFILES"
else
BINARY_PKGS_ARGS["hadoop2.7"]="-Phadoop-2.7 $HIVE_PROFILES"
fi
fi
declare -A BINARY_PKGS_EXTRA
BINARY_PKGS_EXTRA["hadoop3.2"]="withpip,withr"
if [[ $PUBLISH_SCALA_2_11 = 1 ]]; then
key="without-hadoop-scala-2.11"
args="-Phadoop-provided"
extra=""
if ! make_binary_release "$key" "$SCALA_2_11_PROFILES $args" "$extra" "2.11"; then
error "Failed to build $key package. Check logs for details."
fi
fi
if [[ $PUBLISH_SCALA_2_12 = 1 ]]; then
echo "Packages to build: ${!BINARY_PKGS_ARGS[@]}"
for key in "${!BINARY_PKGS_ARGS[@]}"; do
args=${BINARY_PKGS_ARGS[$key]}
extra=${BINARY_PKGS_EXTRA[$key]}
if ! make_binary_release "$key" "$SCALA_2_12_PROFILES $args" "$extra" "2.12"; then
error "Failed to build $key package. Check logs for details."
fi
done
fi
rm -rf spark-$SPARK_VERSION-bin-*/
if ! is_dry_run; then
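# Stage the binaries in the dev dist area over SVN; '--depth=empty' checks out just
# the target directory rather than every existing release candidate.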
svn co --depth=empty $RELEASE_STAGING_LOCATION svn-spark
rm -rf "svn-spark/${DEST_DIR_NAME}-bin"
mkdir -p "svn-spark/${DEST_DIR_NAME}-bin"
echo "Copying release tarballs"
cp spark-* "svn-spark/${DEST_DIR_NAME}-bin/"
cp pyspark-* "svn-spark/${DEST_DIR_NAME}-bin/"
cp SparkR_* "svn-spark/${DEST_DIR_NAME}-bin/"
svn add "svn-spark/${DEST_DIR_NAME}-bin"
cd svn-spark
svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION" --no-auth-cache
cd ..
rm -rf svn-spark
fi
exit 0
fi
if [[ "$1" == "docs" ]]; then
# Documentation
cd spark
echo "Building Spark docs"
cd docs
# TODO: Make configurable to add this: PRODUCTION=1
PRODUCTION=1 RELEASE_VERSION="$SPARK_VERSION" jekyll build
cd ..
cd ..
if ! is_dry_run; then
svn co --depth=empty $RELEASE_STAGING_LOCATION svn-spark
rm -rf "svn-spark/${DEST_DIR_NAME}-docs"
mkdir -p "svn-spark/${DEST_DIR_NAME}-docs"
echo "Copying release documentation"
cp -R "spark/docs/_site" "svn-spark/${DEST_DIR_NAME}-docs/"
svn add "svn-spark/${DEST_DIR_NAME}-docs"
cd svn-spark
svn ci --username $ASF_USERNAME --password "$ASF_PASSWORD" -m"Apache Spark $SPARK_PACKAGE_VERSION docs" --no-auth-cache
cd ..
rm -rf svn-spark
fi
mv "spark/docs/_site" docs/
exit 0
fi
if [[ "$1" == "publish-snapshot" ]]; then
cd spark
# Publish Spark to Maven release repo
echo "Deploying Spark SNAPSHOT at '$GIT_REF' ($git_hash)"
echo "Publish version is $SPARK_VERSION"
if [[ ! $SPARK_VERSION == *"SNAPSHOT"* ]]; then
echo "ERROR: Snapshots must have a version containing SNAPSHOT"
echo "ERROR: You gave version '$SPARK_VERSION'"
exit 1
fi
# Coerce the requested version
$MVN versions:set -DnewVersion=$SPARK_VERSION
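# Write a throwaway settings.xml carrying the ASF credentials for the
# apache.snapshots.https server; it is removed after the deploy.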
tmp_settings="tmp-settings.xml"
echo "<settings><servers><server>" > $tmp_settings
echo "<id>apache.snapshots.https</id><username>$ASF_USERNAME</username>" >> $tmp_settings
echo "<password>$ASF_PASSWORD</password>" >> $tmp_settings
echo "</server></servers></settings>" >> $tmp_settings
# Generate a random port for Zinc
export ZINC_PORT=$(python -S -c "import random; print(random.randrange(3030,4030))")
$MVN -DzincPort=$ZINC_PORT --settings $tmp_settings -DskipTests $SCALA_2_12_PROFILES $PUBLISH_PROFILES deploy
rm $tmp_settings
cd ..
exit 0
fi
if [[ "$1" == "publish-release" ]]; then
cd spark
# Publish Spark to Maven release repo
echo "Publishing Spark checkout at '$GIT_REF' ($git_hash)"
echo "Publish version is $SPARK_VERSION"
# Coerce the requested version
$MVN versions:set -DnewVersion=$SPARK_VERSION
# Using Nexus API documented here:
# https://support.sonatype.com/entries/39720203-Uploading-to-a-Staging-Repository-via-REST-API
if ! is_dry_run; then
echo "Creating Nexus staging repository"
repo_request="<promoteRequest><data><description>Apache Spark $SPARK_VERSION (commit $git_hash)</description></data></promoteRequest>"
out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
-H "Content-Type:application/xml" -v \
$NEXUS_ROOT/profiles/$NEXUS_PROFILE/start)
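# Pull the staging repository id (of the form orgapachespark-NNNN) out of the XML response.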
staged_repo_id=$(echo $out | sed -e "s/.*\(orgapachespark-[0-9]\{4\}\).*/\1/")
echo "Created Nexus staging repository: $staged_repo_id"
fi
tmp_repo=$(mktemp -d spark-repo-XXXXX)
# Generate a random port for Zinc
export ZINC_PORT=$(python -S -c "import random; print(random.randrange(3030,4030))")
# TODO: revisit for Scala 2.13 support
if [[ $PUBLISH_SCALA_2_11 = 1 ]]; then
./dev/change-scala-version.sh 2.11
$MVN -DzincPort=$ZINC_PORT -Dmaven.repo.local=$tmp_repo -DskipTests \
$SCALA_2_11_PROFILES $PUBLISH_PROFILES clean install
fi
if [[ $PUBLISH_SCALA_2_12 = 1 ]]; then
./dev/change-scala-version.sh 2.12
$MVN -DzincPort=$((ZINC_PORT + 2)) -Dmaven.repo.local=$tmp_repo -DskipTests \
$SCALA_2_12_PROFILES $PUBLISH_PROFILES clean install
fi
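# Everything installed under org/apache/spark in the temporary local repo is exactly
# the set of artifacts to sign and upload.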
pushd $tmp_repo/org/apache/spark
# Remove any extra files generated during install
find . -type f | grep -v '\.jar' | grep -v '\.pom' | xargs rm
echo "Creating hash and signature files"
# Each artifact must have .asc, .md5 and .sha1 companions - Nexus really doesn't like anything else there
for file in $(find . -type f)
do
echo $GPG_PASSPHRASE | $GPG --passphrase-fd 0 --output $file.asc \
--detach-sig --armour $file;
if [ $(command -v md5) ]; then
# Available on OS X; -q to keep only hash
md5 -q $file > $file.md5
else
# Available on Linux; cut to keep only hash
md5sum $file | cut -f1 -d' ' > $file.md5
fi
sha1sum $file | cut -f1 -d' ' > $file.sha1
done
if ! is_dry_run; then
nexus_upload=$NEXUS_ROOT/deployByRepositoryId/$staged_repo_id
echo "Uploading files to $nexus_upload"
for file in $(find . -type f)
do
# strip leading ./
file_short=$(echo $file | sed -e "s/\.\///")
dest_url="$nexus_upload/org/apache/spark/$file_short"
echo " Uploading $file_short"
curl -u $ASF_USERNAME:$ASF_PASSWORD --upload-file $file_short $dest_url
done
echo "Closing nexus staging repository"
repo_request="<promoteRequest><data><stagedRepositoryId>$staged_repo_id</stagedRepositoryId><description>Apache Spark $SPARK_VERSION (commit $git_hash)</description></data></promoteRequest>"
out=$(curl -X POST -d "$repo_request" -u $ASF_USERNAME:$ASF_PASSWORD \
-H "Content-Type:application/xml" -v \
$NEXUS_ROOT/profiles/$NEXUS_PROFILE/finish)
echo "Closed Nexus staging repository: $staged_repo_id"
fi
popd
rm -rf $tmp_repo
cd ..
exit 0
fi
cd ..
rm -rf spark
echo "ERROR: expects to be called with 'package', 'docs', 'publish-release' or 'publish-snapshot'"