Add Dockerfile with nginx+uwsgi, volumes and optional S3 persistent storage
fluential committed Jul 14, 2020
1 parent e96a731 commit 01ae858
Showing 6 changed files with 123 additions and 11 deletions.
24 changes: 15 additions & 9 deletions Dockerfile
@@ -1,14 +1,20 @@
-FROM tiangolo/uwsgi-nginx-flask:python3.6-alpine3.7
+FROM tiangolo/uwsgi-nginx-flask:python3.8-alpine
 
 ENV STATIC_URL /static
 ENV STATIC_PATH /var/www/app/static
+COPY . /app
 
-COPY ./requirements.txt /var/www/requirements.txt
+COPY docker/.aws /root/.aws
+COPY docker/sqlitebackup.sh /usr/local/bin/
+COPY docker/supervisor.d/ /etc/supervisor.d/
+COPY docker/prestart.sh /app/.
 
-COPY . /app
-RUN pip install -r /var/www/requirements.txt
+RUN chmod 755 /usr/local/bin/sq*
 
+RUN apk add bash sqlite
+RUN pip3 install awscli awscli_plugin_endpoint
 
+RUN cd /usr/local/bin && wget https://raw.githubusercontent.com/jacobtomlinson/docker-sqlite-to-s3/master/sqlite-to-s3.sh && chmod 755 sqlite*
 
-EXPOSE 8080
+RUN aws configure set plugins.endpoint awscli_plugin_endpoint
 
-ENTRYPOINT [ "python" ]
-CMD [ "/app/server.py" ]
+RUN pip install -r /app/requirements.txt
+VOLUME /app/data/
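
With the tiangolo base image, nginx fronts uwsgi and listens on port 80 out of the box, which is why the old EXPOSE 8080 / ENTRYPOINT / CMD lines go away. A minimal build-and-run sketch (image name, volume name, bucket, and credentials are placeholders):

# Sketch only: image/volume/bucket names are placeholders.
docker build -t analytics .
# The named volume persists /app/data (the SQLite database);
# AWS credentials enable the optional S3 restore/backup.
docker run -d -p 8080:80 \
  -v analytics-data:/app/data \
  -e AWS_ACCESS_KEY_ID=AKIA... -e AWS_SECRET_ACCESS_KEY=... \
  -e S3_BUCKET=my-sqlite-bucket \
  analytics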
4 changes: 2 additions & 2 deletions config.py
@@ -1,7 +1,7 @@
 import os
 
 # Location of database
-DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data.db'))
+DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data/data.db'))
 
 # Amount of time before another view by the same user will count
 COOKIE_TIMEOUT = 60 * 5
@@ -42,4 +42,4 @@
 # Whitelist of URL patterns to track
 # Any URL will be allowed if list is empty
 URL_WHITELIST_RE = [
-]
\ No newline at end of file
+]
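
With the default now under data/, the database lands on the /app/data volume declared in the Dockerfile; the environment variable still wins when set. For example (illustrative; server.py is the entry point per the pre-change CMD):

# Override the database location explicitly.
DATABASE_FILE_PATH=/app/data/data.db python /app/server.py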
8 changes: 8 additions & 0 deletions docker/.aws/config
@@ -0,0 +1,8 @@
[profile wasabi]
region = eu-central-1
s3 =
endpoint_url = https://s3.eu-central-1.wasabisys.com
s3api =
endpoint_url = https://s3.eu-central-1.wasabisys.com
[plugins]
endpoint = awscli_plugin_endpoint
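
The awscli_plugin_endpoint plugin (installed and registered in the Dockerfile) reads endpoint_url from this profile, pointing S3 calls at Wasabi rather than AWS. No credentials are baked into the image; they arrive via environment variables at runtime. A quick smoke test might look like this (bucket name is a placeholder):

# List the backup bucket through the Wasabi endpoint.
AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
  aws --profile wasabi s3 ls s3://my-sqlite-bucket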
27 changes: 27 additions & 0 deletions docker/prestart.sh
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

PROGNAME=$(basename "$0")

: ${DATABASE_FILE_PATH:="/app/data/data.db"}
: ${S3_BUCKET:="sqlite"}

set -e -o pipefail

export DATABASE_PATH=$DATABASE_FILE_PATH S3_BUCKET

err() {
  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2
}

status() {
  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@"
}

if [[ -n $AWS_ACCESS_KEY_ID ]] && [[ -n $AWS_SECRET_ACCESS_KEY ]]; then
  status "==> AWS CREDS DETECTED"
  if [[ ! -f $DATABASE_PATH ]]; then
    /usr/local/bin/sqlite-to-s3.sh restore
  else
    status "LOCAL DB FOUND at $DATABASE_PATH!"
  fi
fi
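
The tiangolo base images execute /app/prestart.sh before starting uwsgi, so this restore runs once per container boot: with credentials present and no local database, the last S3 copy is pulled down. The same restore can be triggered by hand, mirroring the script's exports (values are placeholders):

# One-off restore, as prestart.sh would do on first boot.
AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
DATABASE_PATH=/app/data/data.db S3_BUCKET=sqlite \
  /usr/local/bin/sqlite-to-s3.sh restore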
65 changes: 65 additions & 0 deletions docker/sqlitebackup.sh
@@ -0,0 +1,65 @@
#!/usr/bin/env bash
#
set -eo pipefail

shopt -s nullglob dotglob

PROGNAME=$(basename "$0")

# Provide an option to override values via env variables
: ${BKPINTERVAL:="60"}
: ${LOCK_FD:="200"}
: ${LOCK_FILE:="/var/lock/${PROGNAME}.lock"}
: ${S3_BUCKET:="sqlite"}
: ${DATABASE_FILE_PATH:="/app/data/data.db"}

export S3_BUCKET DATABASE_PATH=$DATABASE_FILE_PATH

err() {
  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2
}

status() {
  echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@"
}

lock() {
  eval "exec $LOCK_FD>$LOCK_FILE"
  # Brace group (not a subshell) so that exit actually aborts the script on lock failure
  flock -n "$LOCK_FD" || { err "Cannot acquire lock on ${LOCK_FILE}"; exit 1; }
}

cleanup() {
  shopt -u nullglob dotglob
}

finish() {
  local exit_status="${1:-$?}"
  if [[ "$exit_status" -eq 0 ]]; then
    status "DONE (exit code: ${exit_status})"
  else
    err "exit code: ${exit_status}"
  fi
  cleanup
  exit $exit_status
}

trap finish SIGHUP SIGINT SIGQUIT SIGTERM ERR

lock

status "Initial delay 30s ..."
sleep 30

while :; do
  status "Starting backup"
  if [[ -n $AWS_ACCESS_KEY_ID ]] && [[ -n $AWS_SECRET_ACCESS_KEY ]]; then
    /usr/local/bin/sqlite-to-s3.sh backup
  else
    status "==> NO AWS credentials, backup skipped!"
  fi
  status "DONE."
  status "Next backup in $BKPINTERVAL seconds..."
  sleep "$BKPINTERVAL"
done

finish
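
The flock guard means a second copy of the loop exits instead of running a concurrent backup, and every knob is an environment variable, so the schedule can be changed without editing the script. An illustrative override (bucket name is a placeholder):

# Back up every 5 minutes to a custom bucket.
BKPINTERVAL=300 S3_BUCKET=my-sqlite-bucket /usr/local/bin/sqlitebackup.sh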
6 changes: 6 additions & 0 deletions docker/supervisor.d/sqlitebackup.ini
@@ -0,0 +1,6 @@
[program:sqlbackup]
command=/usr/local/bin/sqlitebackup.sh -r
autostart=true
autorestart=true
stderr_logfile=/dev/stderr
stdout_logfile=/dev/stdout
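Assuming the Alpine base image's supervisord includes /etc/supervisor.d/*.ini (the stock layout), this runs the backup loop alongside nginx and uwsgi and restarts it on failure. It can be inspected in a running container (container name is a placeholder):

# Check the backup job from inside a running container.
docker exec -it analytics supervisorctl status sqlbackup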
