Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Add support for SSE-C encryption #1217

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .ci.s3cfg
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ add_headers =
bucket_location = us-east-1
ca_certs_file =
cache_file =
check_ssl_certificate = True
check_ssl_certificate = False
check_ssl_hostname = True
cloudfront_host = cloudfront.amazonaws.com
default_mime_type = binary/octet-stream
Expand Down Expand Up @@ -67,9 +67,9 @@ stop_on_error = False
storage_class =
urlencoding_mode = normal
use_http_expect = False
use_https = False
use_https = True
use_mime_magic = True
verbosity = WARNING
website_endpoint = http://%(bucket)s.s3-website-%(location)s.amazonaws.com/
website_endpoint = https://%(bucket)s.s3-website-%(location)s.amazonaws.com/
website_error =
website_index = index.html
20 changes: 18 additions & 2 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,17 +28,33 @@ jobs:
with:
path: ~/cache
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.cache-revision }}
- name: Cache certgen
id: cache-certgen
uses: actions/cache@v2
env:
cache-name: cache-certgen
with:
path: ~/.minio/certs
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ env.cache-revision }}
- name: Download minio on cache miss
if: steps.cache-minio.outputs.cache-hit != 'true'
run: |
mkdir -p ~/cache
test ! -e ~/cache/minio && wget -O ~/cache/minio https://dl.minio.io/server/minio/release/linux-amd64/minio || echo "Minio already in cache"
- name: Download certgen on cache miss
if: steps.cache-certgen.outputs.cache-hit != 'true'
run: |
mkdir -p ~/.minio/certs
test ! -e ~/.minio/certs/certgen && wget -O ~/.minio/certs/certgen https://github.com/minio/certgen/releases/download/v0.0.2/certgen-linux-amd64 || echo "Certgen already in cache"
chmod +x ~/.minio/certs/certgen
cd ~/.minio/certs
~/.minio/certs/certgen -ca -host "localhost"
- name: Start a local instance of minio
run: |
export AWS_ACCESS_KEY_ID=Q3AM3UQ867SPQQA43P2F
export AWS_SECRET_ACCESS_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
export MINIO_ACCESS_KEY=Q3AM3UQ867SPQQA43P2F
export MINIO_SECRET_KEY=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
export MINIO_ROOT_USER=Q3AM3UQ867SPQQA43P2F
export MINIO_ROOT_PASSWORD=zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG
chmod +x ~/cache/minio
mkdir -p ~/minio_tmp
~/cache/minio server ~/minio_tmp &
Expand Down
6 changes: 6 additions & 0 deletions S3/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,8 @@ class Config(object):
extra_headers = SortedDict(ignore_case = True)
force = False
server_side_encryption = False
sse_customer_key = ""
sse_copy_source_customer_key = ""
enable = None
get_continue = False
put_continue = False
Expand Down Expand Up @@ -293,6 +295,10 @@ def __init__(self, configfile = None, access_key=None, secret_key=None, access_t
warning('Cannot have server_side_encryption (S3 SSE) and KMS_key set (S3 KMS). KMS encryption will be used. Please set server_side_encryption to False')
if self.kms_key and self.signature_v2 == True:
raise Exception('KMS encryption requires signature v4. Please set signature_v2 to False')
if self.sse_customer_key and len(self.sse_customer_key) != 32:
raise Exception('sse-customer-key must be 32 characters')
if self.sse_copy_source_customer_key and len(self.sse_copy_source_customer_key) != 32:
raise Exception('sse_copy_source_customer_key must be 32 characters')

def role_config(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion S3/FileDict.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def get_md5(self, relative_file):
if 'md5' in self[relative_file]:
return self[relative_file]['md5']
md5 = self.get_hardlink_md5(relative_file)
if md5 is None and 'md5' in cfg.sync_checks:
if md5 is None and 'md5' in cfg.preserve_attrs_list:
logging.debug(u"doing file I/O to read md5 of %s" % relative_file)
md5 = Utils.hash_file_md5(self[relative_file]['full_name'])
self.record_md5(relative_file, md5)
Expand Down
17 changes: 17 additions & 0 deletions S3/FileLists.py
Original file line number Diff line number Diff line change
Expand Up @@ -552,6 +552,23 @@ def _compare(src_list, dst_lst, src_remote, dst_remote, file):
attribs_match = False
debug(u"XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5))

# Check mtime. This compares local mtime to the upload time of remote file
compare_mtime = 'mtime' in cfg.sync_checks
if attribs_match and compare_mtime:
try:
src_mtime = src_list[file]['mtime']
dst_mtime = dst_list[file]['timestamp']
except (IOError,OSError):
# mtime sum verification failed - ignore that file altogether
debug(u"IGNR: %s (disappeared)" % (file))
warning(u"%s: file disappeared, ignoring." % (file))
raise

if src_mtime > dst_mtime:
## checksums are different.
attribs_match = False
debug(u"XFER: %s (mtime newer than last upload: src=%s dst=%s)" % (file, src_mtime, dst_mtime))

return attribs_match

# we don't support local->local sync, use 'rsync' or something like that instead ;-)
Expand Down
114 changes: 83 additions & 31 deletions S3/S3.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,28 +8,30 @@

from __future__ import absolute_import, division

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've just sorted the imports — I can revert them if needed.

import sys
import os
import time
import errno
import mimetypes
import io
import mimetypes
import os
import pprint
from xml.sax import saxutils
import sys
import time
from logging import debug, error, info, warning
from socket import timeout as SocketTimeoutException
from logging import debug, info, warning, error
from stat import ST_SIZE
from xml.sax import saxutils

try:
# python 3 support
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
try:
# Python 2 support
from base64 import encodestring
from base64 import encodestring, b64encode
except ImportError:
# Python 3.9.0+ support
from base64 import encodebytes as encodestring
from base64 import b64encode

import select

Expand All @@ -38,26 +40,27 @@
except ImportError:
from md5 import md5

from .BaseUtils import (getListFromXml, getTextFromXml, getRootTagName,
decode_from_s3, encode_to_s3, s3_quote)
from .Utils import (convertHeaderTupleListToDict, hash_file_md5, unicodise,
deunicodise, check_bucket_name,
check_bucket_name_dns_support, getHostnameFromBucket,
calculateChecksum)
from .SortedDict import SortedDict
from .AccessLog import AccessLog
from .ACL import ACL, GranteeLogDelivery
from .BaseUtils import (decode_from_s3, encode_to_s3, getListFromXml,
getRootTagName, getTextFromXml, s3_quote)
from .BidirMap import BidirMap
from .Config import Config
from .ConnMan import ConnMan
from .Crypto import (checksum_sha256_buffer, checksum_sha256_file,
format_param_str, sign_request_v2, sign_request_v4)
from .Exceptions import *
from .MultiPart import MultiPartUpload
from .S3Uri import S3Uri
from .ConnMan import ConnMan
from .Crypto import (sign_request_v2, sign_request_v4, checksum_sha256_file,
checksum_sha256_buffer, format_param_str)
from .SortedDict import SortedDict
from .Utils import (calculateChecksum, check_bucket_name,
check_bucket_name_dns_support,
convertHeaderTupleListToDict, deunicodise,
getHostnameFromBucket, hash_file_md5, unicodise)

try:
from ctypes import ArgumentError

import magic
try:
## https://github.com/ahupp/python-magic
Expand Down Expand Up @@ -701,6 +704,14 @@ def object_put(self, filename, uri, extra_headers = None, extra_label = ""):
headers['x-amz-server-side-encryption'] = 'aws:kms'
headers['x-amz-server-side-encryption-aws-kms-key-id'] = self.config.kms_key

if self.config.sse_customer_key:
sse_customer_key = encode_to_s3(self.config.sse_customer_key)
key_encoded = b64encode(sse_customer_key)
md5_encoded = b64encode(md5(sse_customer_key).digest())
headers["x-amz-server-side-encryption-customer-algorithm"] = "AES256"
headers["x-amz-server-side-encryption-customer-key"] = decode_from_s3(key_encoded)
headers["x-amz-server-side-encryption-customer-key-md5"] = decode_from_s3(md5_encoded)

## MIME-type handling
headers["content-type"] = self.content_type(filename=filename)

Expand Down Expand Up @@ -755,10 +766,30 @@ def object_put(self, filename, uri, extra_headers = None, extra_label = ""):
response = self.send_file(request, src_stream, labels)
return response

def object_get(self, uri, stream, dest_name, start_position = 0, extra_label = ""):
def object_get(self, uri, stream, dest_name, extra_headers, start_position = 0, extra_label = ""):
if uri.type != "s3":
raise ValueError("Expected URI type 's3', got '%s'" % uri.type)
request = self.create_request("OBJECT_GET", uri = uri)
headers = SortedDict(ignore_case=True)
if extra_headers:
headers.update(extra_headers)
## Set server side encryption
if self.config.server_side_encryption:
headers["x-amz-server-side-encryption"] = "AES256"

## Set kms headers
if self.config.kms_key:
headers['x-amz-server-side-encryption'] = 'aws:kms'
headers['x-amz-server-side-encryption-aws-kms-key-id'] = self.config.kms_key

if self.config.sse_customer_key:
sse_customer_key = encode_to_s3(self.config.sse_customer_key)
key_encoded = b64encode(sse_customer_key)
md5_encoded = b64encode(md5(sse_customer_key).digest())
headers["x-amz-server-side-encryption-customer-algorithm"] = "AES256"
headers["x-amz-server-side-encryption-customer-key"] = decode_from_s3(key_encoded)
headers["x-amz-server-side-encryption-customer-key-md5"] = decode_from_s3(md5_encoded)

request = self.create_request("OBJECT_GET", uri = uri, headers=headers)
labels = { 'source' : uri.uri(), 'destination' : dest_name, 'extra' : extra_label }
response = self.recv_file(request, stream, labels, start_position)
return response
Expand Down Expand Up @@ -954,6 +985,14 @@ def object_copy(self, src_uri, dst_uri, extra_headers=None,
headers['x-amz-server-side-encryption-aws-kms-key-id'] = \
self.config.kms_key

if self.config.sse_copy_source_customer_key:
sse_copy_source_customer_key = encode_to_s3(self.config.sse_copy_source_customer_key)
key_encoded = b64encode(sse_copy_source_customer_key)
md5_encoded = b64encode(md5(sse_copy_source_customer_key).digest())
headers["x-amz-copy-source-server-side-encryption-customer-algorithm"] = "AES256"
headers["x-amz-copy-source-server-side-encryption-customer-key"] = decode_from_s3(key_encoded)
headers["x-amz-copy-source-server-side-encryption-customer-key-md5"] = decode_from_s3(md5_encoded)

# Following meta data are not updated in simple COPY by aws.
if extra_headers:
headers.update(extra_headers)
Expand Down Expand Up @@ -1828,19 +1867,32 @@ def send_file(self, request, stream, labels, buffer = '', throttle = 0,
## Non-recoverable error
raise S3Error(response)

debug("MD5 sums: computed=%s, received=%s" % (md5_computed, response["headers"].get('etag', '').strip('"\'')))
## when using KMS encryption, MD5 etag value will not match
md5_from_s3 = response["headers"].get("etag", "").strip('"\'')
if ('-' not in md5_from_s3) and (md5_from_s3 != md5_hash.hexdigest()) and response["headers"].get("x-amz-server-side-encryption") != 'aws:kms':
warning("MD5 Sums don't match!")
if retries:
warning("Retrying upload of %s" % (filename))
return self.send_file(request, stream, labels, buffer, throttle,
retries - 1, offset, chunk_size, use_expect_continue)
if self.config.sse_customer_key:
if response["headers"]["x-amz-server-side-encryption-customer-key-md5"] != \
request.headers["x-amz-server-side-encryption-customer-key-md5"]:
warning("MD5 of customer key don't match!")
if retries:
warning("Retrying upload of %s" % (filename))
return self.send_file(request, stream, labels, buffer, throttle, retries - 1, offset, chunk_size)
else:
warning("Too many failures. Giving up on '%s'" % (filename))
raise S3UploadError
else:
warning("Too many failures. Giving up on '%s'" % (filename))
raise S3UploadError("Too many failures. Giving up on '%s'"
% filename)
debug("Match of x-amz-server-side-encryption-customer-key-md5")
else:
debug("MD5 sums: computed=%s, received=%s" % (md5_computed, response["headers"].get('etag', '').strip('"\'')))
## when using KMS encryption, MD5 etag value will not match
md5_from_s3 = response["headers"].get("etag", "").strip('"\'')
if ('-' not in md5_from_s3) and (md5_from_s3 != md5_hash.hexdigest()) and response["headers"].get("x-amz-server-side-encryption") != 'aws:kms':
warning("MD5 Sums don't match!")
if retries:
warning("Retrying upload of %s" % (filename))
return self.send_file(request, stream, labels, buffer, throttle,
retries - 1, offset, chunk_size, use_expect_continue)
else:
warning("Too many failures. Giving up on '%s'" % (filename))
raise S3UploadError("Too many failures. Giving up on '%s'"
% filename)

return response

Expand Down
18 changes: 18 additions & 0 deletions run-tests-minio.py
Original file line number Diff line number Diff line change
Expand Up @@ -743,6 +743,24 @@ def pbucket(tail):
test_s3cmd("Simple delete with rm", ['rm', '%s/xyz/test_rm/TypeRa.ttf' % pbucket(1)],
must_find = [ "delete: '%s/xyz/test_rm/TypeRa.ttf'" % pbucket(1) ])

## ====== Check SSE-C encrypted object upload
test_s3cmd("Put server-side encrypted object", ['put', 'testsuite/demo/some-file.xml', 's3://%s/xyz/demo/some-file.xml' % bucket(1), '--sse-customer-key=12345678901234567890123456789012'],
retcode=0,
must_find=["upload: 'testsuite/demo/some-file.xml' -> '%s/xyz/demo/some-file.xml'" % pbucket(1)])

## ====== Check SSE-C encrypted object wrong passphrase
test_s3cmd("Get server-side encrypted object with wrong passphrase", ['get', u'%s/xyz/demo/some-file.xml' % pbucket(1), 'testsuite-out', '--sse-customer-key=11111111111111111111111111111111'],
retcode = EX_ACCESSDENIED,
must_find = [ "Access Denied." ])

## ====== Check SSE-C encrypted object download
test_s3cmd("Get server-side encrypted object", ['get', u'%s/xyz/demo/some-file.xml' % pbucket(1), 'testsuite-out', '--sse-customer-key=12345678901234567890123456789012'],
retcode = 0,
must_find = [ "-> 'testsuite-out/some-file.xml'" ])

## ====== Clean up local destination dir
test_flushdir("Clean testsuite-out/", "testsuite-out")

## ====== Create expiration rule with days and prefix
# Minio: disabled
#test_s3cmd("Create expiration rule with days and prefix", ['expire', pbucket(1), '--expiry-days=365', '--expiry-prefix=log/'],
Expand Down
18 changes: 18 additions & 0 deletions run-tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -733,6 +733,24 @@ def pbucket(tail):
test_s3cmd("Simple delete with rm", ['rm', '%s/xyz/test_rm/TypeRa.ttf' % pbucket(1)],
must_find = [ "delete: '%s/xyz/test_rm/TypeRa.ttf'" % pbucket(1) ])

## ====== Check SSE-C encrypted object upload
test_s3cmd("Put server-side encrypted object", ['put', 'testsuite/demo/some-file.xml', 's3://%s/xyz/demo/some-file.xml' % bucket(1), '--sse-customer-key=12345678901234567890123456789012'],
retcode=0,
must_find=["upload: 'testsuite/demo/some-file.xml' -> '%s/xyz/demo/some-file.xml'" % pbucket(1)])

## ====== Check SSE-C encrypted object wrong passphrase
test_s3cmd("Get server-side encrypted object with wrong passphrase", ['get', u'%s/xyz/demo/some-file.xml' % pbucket(1), 'testsuite-out', '--sse-customer-key=11111111111111111111111111111111'],
retcode = EX_SERVERERROR,
must_find = [ "The calculated MD5 hash of the key did not match" ])

## ====== Check SSE-C encrypted object download
test_s3cmd("Get server-side encrypted object", ['get', u'%s/xyz/demo/some-file.xml' % pbucket(1), 'testsuite-out', '--sse-customer-key=12345678901234567890123456789012'],
retcode = 0,
must_find = [ "-> 'testsuite-out/some-file.xml'" ])

## ====== Clean up local destination dir
test_flushdir("Clean testsuite-out/", "testsuite-out")

## ====== Create expiration rule with days and prefix
test_s3cmd("Create expiration rule with days and prefix", ['expire', pbucket(1), '--expiry-days=365', '--expiry-prefix=log/'],
must_find = [ "Bucket '%s/': expiration configuration is set." % pbucket(1)])
Expand Down
Loading