diff --git a/Dockerfile b/Dockerfile index c654f01..90c6702 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,20 @@ -FROM tiangolo/uwsgi-nginx-flask:python3.6-alpine3.7 +FROM tiangolo/uwsgi-nginx-flask:python3.8-alpine -ENV STATIC_URL /static -ENV STATIC_PATH /var/www/app/static +COPY . /app -COPY ./requirements.txt /var/www/requirements.txt +COPY docker/.aws /root/.aws +COPY docker/sqlitebackup.sh /usr/local/bin/ +COPY docker/supervisor.d/ /etc/supervisor.d/ +COPY docker/prestart.sh /app/. -COPY . /app -RUN pip install -r /var/www/requirements.txt +RUN chmod 755 /usr/local/bin/sq* + +RUN apk add --no-cache bash sqlite +RUN pip3 install --no-cache-dir awscli awscli_plugin_endpoint + +RUN cd /usr/local/bin && wget https://raw.githubusercontent.com/jacobtomlinson/docker-sqlite-to-s3/master/sqlite-to-s3.sh && chmod 755 sqlite* -EXPOSE 8080 +RUN aws configure set plugins.endpoint awscli_plugin_endpoint -ENTRYPOINT [ "python" ] -CMD [ "/app/server.py" ] +RUN pip install --no-cache-dir -r /app/requirements.txt +VOLUME /app/data/ diff --git a/config.py b/config.py index 1016d95..59e01e9 100644 --- a/config.py +++ b/config.py @@ -1,7 +1,7 @@ import os # Location of database -DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data.db')) +DATABASE_FILE_PATH = os.path.abspath(os.getenv('DATABASE_FILE_PATH', 'data/data.db')) # Amount of time before another view by the same user will count COOKIE_TIMEOUT = 60 * 5 @@ -42,4 +42,4 @@ # Whitelist of URL patterns to track # Any URL will be allowed if list is empty URL_WHITELIST_RE = [ -] \ No newline at end of file +] diff --git a/docker/.aws/config b/docker/.aws/config new file mode 100644 index 0000000..24467c5 --- /dev/null +++ b/docker/.aws/config @@ -0,0 +1,8 @@ +[profile wasabi] +region = eu-central-1 +s3 = + endpoint_url = https://s3.eu-central-1.wasabisys.com +s3api = + endpoint_url = https://s3.eu-central-1.wasabisys.com +[plugins] +endpoint = awscli_plugin_endpoint diff --git a/docker/prestart.sh b/docker/prestart.sh new file mode 100644 
index 0000000..be5ec7d --- /dev/null +++ b/docker/prestart.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +: ${DATABASE_FILE_PATH:="/app/data/data.db"} +: ${S3_BUCKET:="sqlite"} + +set -e -o pipefail + +export DATABASE_PATH=$DATABASE_FILE_PATH S3_BUCKET + +err() { + echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2 +} + +status() { + echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@" +} + +PROGNAME=$(basename $0) + +if [[ ! -z $AWS_ACCESS_KEY_ID ]] && [[ ! -z $AWS_SECRET_ACCESS_KEY ]]; then + status "==> AWS CREDS DETECTED" + if [[ ! -f $DATABASE_PATH ]] ; then + /usr/local/bin/sqlite-to-s3.sh restore + else + status "LOCAL DB FOUND at $DATABASE_PATH!"; + fi +fi diff --git a/docker/sqlitebackup.sh b/docker/sqlitebackup.sh new file mode 100644 index 0000000..925231b --- /dev/null +++ b/docker/sqlitebackup.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash +# +set -eo pipefail + +shopt -s nullglob dotglob + +PROGNAME=$(basename $0) + +# Provide an option to override values via env variables +: ${BKPINTERVAL:="60"} +: ${LOCK_FD:="200"} +: ${LOCK_FILE:="/var/lock/${PROGNAME}.lock"} +: ${S3_BUCKET:="sqlite"} +: ${DATABASE_FILE_PATH:="/app/data/data.db"} + +export S3_BUCKET DATABASE_PATH=$DATABASE_FILE_PATH + +err() { + echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): ERROR: $@" >&2 +} + +status() { + echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')] ($PROGNAME): $@" +} + +lock() { + eval "exec $LOCK_FD>$LOCK_FILE" + flock -n $LOCK_FD || { err "Cannot acquire lock on ${LOCK_FILE}" ; exit 1; } +} + +cleanup() { + shopt -u nullglob dotglob +} + +finish() { + local exit_status="${1:-$?}" + if [[ "$exit_status" -eq 0 ]]; then + status "DONE (exit code: ${exit_status})" + else + err "exit code: ${exit_status}" + fi + cleanup + exit $exit_status +} + +trap finish SIGHUP SIGINT SIGQUIT SIGTERM ERR + +lock + +status "Initial delay 30s ..." +sleep 30 + +while :;do + status "Starting backup" + if [[ ! -z $AWS_ACCESS_KEY_ID ]] && [[ ! 
-z $AWS_SECRET_ACCESS_KEY ]]; then + /usr/local/bin/sqlite-to-s3.sh backup + else + status "==> NO AWS credentials, backup skipped!" + fi + status "DONE." + status "Next backup in $BKPINTERVAL seconds..." + sleep "$BKPINTERVAL" +done + +finish diff --git a/docker/supervisor.d/sqlitebackup.ini b/docker/supervisor.d/sqlitebackup.ini new file mode 100644 index 0000000..e1abb70 --- /dev/null +++ b/docker/supervisor.d/sqlitebackup.ini @@ -0,0 +1,6 @@ +[program:sqlbackup] +command=/usr/local/bin/sqlitebackup.sh +autostart=true +autorestart=true +stderr_logfile=/dev/stderr +stdout_logfile=/dev/stdout