
Commit 04526f1

feat(backup): honor s3 endpoint config value
1 parent 1086b75 commit 04526f1

4 files changed (+35 -29 lines)

Makefile
+1 -1

@@ -3,7 +3,7 @@
 # - Docker image name
 # - Kubernetes service, rc, pod, secret, volume names
 SHORT_NAME := postgres
-DEIS_REGISTY ?= ${DEV_REGISTRY}/
+DEIS_REGISTRY ?= ${DEV_REGISTRY}
 IMAGE_PREFIX ?= deis

 include versioning.mk

rootfs/bin/create_bucket
+16 -18

@@ -12,6 +12,7 @@ from oauth2client.service_account import ServiceAccountCredentials
 from gcloud.storage.client import Client
 from gcloud import exceptions
 from azure.storage.blob import BlobService
+from urllib.parse import urlparse

 def bucket_exists(conn, name):
     bucket = conn.lookup(name)
@@ -23,25 +24,22 @@ bucket_name = os.getenv('BUCKET_NAME')
 region = os.getenv('S3_REGION')

 if os.getenv('DATABASE_STORAGE') == "s3":
-    conn = boto.s3.connect_to_region(region)
+    if os.getenv('S3_ENDPOINT'):
+        endpoint = urlparse(os.getenv('S3_ENDPOINT'))
+        conn = boto.s3.connect_to_region(region,
+                                         host=endpoint.hostname,
+                                         port=endpoint.port,
+                                         path=endpoint.path,
+                                         calling_format=boto.s3.connection.OrdinaryCallingFormat())
+    else:
+        conn = boto.s3.connect_to_region(region)
+
     if not bucket_exists(conn, bucket_name):
-        try:
-            if region == "us-east-1":
-                # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
-                conn.create_bucket(bucket_name)
-            else:
-                conn.create_bucket(bucket_name, location=region)
-        # NOTE(bacongobbler): for versions prior to v2.9.0, the bucket is created in the default region.
-        # if we got here, we need to propagate "us-east-1" into WALE_S3_ENDPOINT because the bucket
-        # exists in a different region and we cannot find it.
-        # TODO(bacongobbler): deprecate this once we drop support for v2.8.0 and lower
-        except S3CreateError as err:
-            if region != 'us-east-1':
-                print('Failed to create bucket in {}. We are now assuming that the bucket was created in us-east-1.'.format(region))
-                with open(os.path.join(os.environ['WALE_ENVDIR'], "WALE_S3_ENDPOINT"), "w+") as file:
-                    file.write('https+path://s3.amazonaws.com:443')
-            else:
-                raise
+        if region == "us-east-1":
+            # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
+            conn.create_bucket(bucket_name)
+        else:
+            conn.create_bucket(bucket_name, location=region)

 elif os.getenv('DATABASE_STORAGE') == "gcs":
     scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
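For context, the hostname, port, and path handed to boto above come straight out of urlparse(). A minimal sketch of that split, using a made-up off-cluster endpoint (not a value from this repo):

# Sketch only: how urlparse() breaks a hypothetical S3_ENDPOINT value into the
# pieces the new code passes to boto.s3.connect_to_region().
from urllib.parse import urlparse

endpoint = urlparse("http://minio.example.com:9000/")  # hypothetical endpoint
print(endpoint.hostname)  # minio.example.com
print(endpoint.port)      # 9000 (None when the URL has no explicit port)
print(endpoint.path)      # /

OrdinaryCallingFormat keeps the bucket name in the URL path rather than in a subdomain, which is generally what custom or S3-compatible endpoints without wildcard DNS need.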

rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
+15 -7

@@ -6,17 +6,25 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then
     AWS_ACCESS_KEY_ID=$(cat /var/run/secrets/deis/objectstore/creds/accesskey)
     AWS_SECRET_ACCESS_KEY=$(cat /var/run/secrets/deis/objectstore/creds/secretkey)
     if [[ "$DATABASE_STORAGE" == "s3" ]]; then
+        USE_SSE=$(cat /var/run/secrets/deis/objectstore/creds/use-sse)
         AWS_REGION=$(cat /var/run/secrets/deis/objectstore/creds/region)
+        S3_ENDPOINT=$(cat /var/run/secrets/deis/objectstore/creds/endpoint)
         BUCKET_NAME=$(cat /var/run/secrets/deis/objectstore/creds/database-bucket)
-        # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
-        # regions other than us-standard.
-        # See https://github.com/wal-e/wal-e/issues/167
-        # See https://github.com/boto/boto/issues/2207
-        if [[ "$AWS_REGION" == "us-east-1" ]]; then
-            echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+        if [[ "$S3_ENDPOINT" == "" ]]; then
+            # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
+            # regions other than us-standard.
+            # See https://github.com/wal-e/wal-e/issues/167
+            # See https://github.com/boto/boto/issues/2207
+            if [[ "$AWS_REGION" == "us-east-1" ]]; then
+                echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+            else
+                echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+            fi
         else
-            echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+            echo "$S3_ENDPOINT" > S3_ENDPOINT
+            echo "$S3_ENDPOINT" | sed -E -e 's!http(s?)://!http\1+path://!' -e 's!/$!!' > WALE_S3_ENDPOINT
         fi
+        echo $USE_SSE > WALE_S3_SSE
     else
         AWS_REGION="us-east-1"
         BUCKET_NAME="dbwal"
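The sed pipeline above rewrites the configured endpoint into the http(s)+path:// pseudo-scheme that WAL-E expects for WALE_S3_ENDPOINT and strips any trailing slash. A rough Python equivalent of that rewrite, with a hypothetical endpoint value:

# Sketch: Python equivalent of the sed rewrite in the hunk above.
# "s3_endpoint" stands in for the endpoint secret; the value is made up.
import re

s3_endpoint = "https://minio.example.com:9000/"
wale_endpoint = re.sub(r'http(s?)://', r'http\1+path://', s3_endpoint)  # https:// -> https+path://
wale_endpoint = re.sub(r'/$', '', wale_endpoint)                        # drop trailing slash
print(wale_endpoint)  # https+path://minio.example.com:9000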

rootfs/patcher-script.d/patch_wal_e_s3.py
+3 -3

@@ -7,10 +7,10 @@ def wrap_uri_put_file(creds, uri, fp, content_type=None, conn=None):
     k = s3_util._uri_to_key(creds, uri, conn=conn)
     if content_type is not None:
         k.content_type = content_type
+    encrypt_key = False
     if os.getenv('DATABASE_STORAGE') == 's3':
-        encrypt_key=True
-    else:
-        encrypt_key=False
+        if os.getenv('WALE_S3_SSE', 'false') == 'true':
+            encrypt_key = True
     k.set_contents_from_file(fp, encrypt_key=encrypt_key)
     return k
 s3.uri_put_file = wrap_uri_put_file
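With this change, server-side encryption is no longer hard-wired to the s3 backend; it follows the WALE_S3_SSE value that 001_setup_envdir.sh now writes. A standalone sketch of the resulting decision (mirroring, not part of, the patched function):

# Sketch: the encrypt_key decision from wrap_uri_put_file(), shown in isolation.
import os

def should_encrypt():
    # SSE applies only to real S3, and only when WALE_S3_SSE is "true".
    if os.getenv('DATABASE_STORAGE') == 's3':
        return os.getenv('WALE_S3_SSE', 'false') == 'true'
    return False

os.environ['DATABASE_STORAGE'] = 's3'
os.environ['WALE_S3_SSE'] = 'false'  # e.g. an endpoint that does not support SSE
print(should_encrypt())              # False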
