Skip to content

Commit 7240393

Browse files
typhoonzero authored and weiguoz committed
Adding hive CI docker env (#38)
* for adding hive CI
1 parent 58edb8f commit 7240393

File tree

3 files changed

+30
-4
lines changed

3 files changed

+30
-4
lines changed

docker/Dockerfile

+24
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,7 @@ RUN rm -rf /tmp/mysql-connector-java-5.1.47
143143
RUN mkdir /dataset
144144
COPY dataset/popularize_churn.sql /dataset/popularize_churn.sql
145145
COPY dataset/popularize_iris.sql /dataset/popularize_iris.sql
146+
COPY dataset/create_model_db.sql /dataset/create_model_db.sql
146147

147148
# Install the Go compiler.
148149
RUN wget --quiet https://dl.google.com/go/go1.11.5.linux-amd64.tar.gz
@@ -156,5 +157,28 @@ RUN mkdir -p /go/bin
156157
ENV GOPATH /go
157158
ENV PATH $PATH:$GOPATH/bin
158159

160+
# Install python and tensorflow env for run test
161+
ARG CONDA_OS=Linux
162+
RUN cd / && curl -sL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o mconda-install.sh && \
163+
bash -x mconda-install.sh -b -p miniconda && \
164+
rm mconda-install.sh
165+
ENV PATH="/miniconda/bin:$PATH"
166+
167+
RUN ls /miniconda/bin && /miniconda/bin/conda create -y -q -n sqlflow-dev python=3.6 && \
168+
echo ". /miniconda/etc/profile.d/conda.sh" >> ~/.bashrc && \
169+
echo "source activate sqlflow-dev" >> ~/.bashrc && \
170+
bash -c "source activate sqlflow-dev && python -m pip install \
171+
tensorflow==2.0.0-alpha0 \
172+
mysql-connector-python \
173+
impyla \
174+
jupyter"
175+
# Install protobuf
176+
RUN wget --quiet https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip && \
177+
apt-get install -y unzip && \
178+
unzip -qq protoc-3.6.1-linux-x86_64.zip -d /usr/local && \
179+
rm protoc-3.6.1-linux-x86_64.zip && \
180+
go get github.com/golang/protobuf/protoc-gen-go && \
181+
mv /go/bin/protoc-gen-go /usr/local/bin/
182+
159183
RUN echo "go get -t sqlflow.org/gohive && go test -v sqlflow.org/gohive" > /build_and_test.bash
160184
RUN chmod +x /build_and_test.bash

docker/dataset/create_model_db.sql

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
CREATE DATABASE IF NOT EXISTS sqlflow_models;

docker/entrypoint.sh

+5-4
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@ function configure() {
2020

2121
local var
2222
local value
23-
23+
2424
echo "Configuring $module"
25-
for c in `printenv | perl -sne 'print "$1 " if m/^${envPrefix}_(.+?)=.*/' -- -envPrefix=$envPrefix`; do
25+
for c in `printenv | perl -sne 'print "$1 " if m/^${envPrefix}_(.+?)=.*/' -- -envPrefix=$envPrefix`; do
2626
name=`echo ${c} | perl -pe 's/___/-/g; s/__/_/g; s/_/./g'`
2727
var="${envPrefix}_${c}"
2828
value=${!var}
@@ -69,7 +69,7 @@ if [ -n "$GANGLIA_HOST" ]; then
6969
echo "$module.period=10"
7070
echo "$module.servers=$GANGLIA_HOST:8649"
7171
done > /etc/hadoop/hadoop-metrics.properties
72-
72+
7373
for module in namenode datanode resourcemanager nodemanager mrappmaster jobhistoryserver; do
7474
echo "$module.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31"
7575
echo "$module.sink.ganglia.period=10"
@@ -99,7 +99,7 @@ function wait_for_it()
9999
echo "[$i/$max_try] ${service}:${port} is still not available; giving up after ${max_try} tries. :/"
100100
exit 1
101101
fi
102-
102+
103103
echo "[$i/$max_try] try in ${retry_seconds}s once again ..."
104104
let "i++"
105105
sleep $retry_seconds
@@ -122,5 +122,6 @@ echo "waiting 30 seconds for hive to start..."
122122
sleep 30
123123
hive -f /dataset/popularize_churn.sql
124124
hive -f /dataset/popularize_iris.sql
125+
hive -f /dataset/create_model_db.sql
125126

126127
exec $@

0 commit comments

Comments
 (0)