diff --git a/Makefile b/Makefile
index 2a73900..9d03683 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@
 # - Docker image name
 # - Kubernetes service, rc, pod, secret, volume names
 SHORT_NAME := postgres
-DEIS_REGISTY ?= ${DEV_REGISTRY}/
+DEIS_REGISTRY ?= ${DEV_REGISTRY}
 IMAGE_PREFIX ?= deis
 
 include versioning.mk
diff --git a/rootfs/bin/create_bucket b/rootfs/bin/create_bucket
index c03dfe9..fb30af5 100755
--- a/rootfs/bin/create_bucket
+++ b/rootfs/bin/create_bucket
@@ -12,6 +12,7 @@ from oauth2client.service_account import ServiceAccountCredentials
 from gcloud.storage.client import Client
 from gcloud import exceptions
 from azure.storage.blob import BlobService
+from urllib.parse import urlparse
 
 def bucket_exists(conn, name):
     bucket = conn.lookup(name)
@@ -23,25 +24,22 @@ bucket_name = os.getenv('BUCKET_NAME')
 region = os.getenv('S3_REGION')
 
 if os.getenv('DATABASE_STORAGE') == "s3":
-    conn = boto.s3.connect_to_region(region)
+    if os.getenv('S3_ENDPOINT'):
+        endpoint = urlparse(os.getenv('S3_ENDPOINT'))
+        conn = boto.s3.connect_to_region(region,
+            host=endpoint.hostname,
+            port=endpoint.port,
+            path=endpoint.path,
+            calling_format=boto.s3.connection.OrdinaryCallingFormat())
+    else:
+        conn = boto.s3.connect_to_region(region)
+
     if not bucket_exists(conn, bucket_name):
-        try:
-            if region == "us-east-1":
-                # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
-                conn.create_bucket(bucket_name)
-            else:
-                conn.create_bucket(bucket_name, location=region)
-        # NOTE(bacongobbler): for versions prior to v2.9.0, the bucket is created in the default region.
-        # if we got here, we need to propagate "us-east-1" into WALE_S3_ENDPOINT because the bucket
-        # exists in a different region and we cannot find it.
-        # TODO(bacongobbler): deprecate this once we drop support for v2.8.0 and lower
-        except S3CreateError as err:
-            if region != 'us-east-1':
-                print('Failed to create bucket in {}. We are now assuming that the bucket was created in us-east-1.'.format(region))
-                with open(os.path.join(os.environ['WALE_ENVDIR'], "WALE_S3_ENDPOINT"), "w+") as file:
-                    file.write('https+path://s3.amazonaws.com:443')
-            else:
-                raise
+        if region == "us-east-1":
+            # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
+            conn.create_bucket(bucket_name)
+        else:
+            conn.create_bucket(bucket_name, location=region)
 
 elif os.getenv('DATABASE_STORAGE') == "gcs":
     scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
diff --git a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
index e2c00af..addc902 100755
--- a/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
+++ b/rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
@@ -6,17 +6,25 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then
   AWS_ACCESS_KEY_ID=$(cat /var/run/secrets/deis/objectstore/creds/accesskey)
   AWS_SECRET_ACCESS_KEY=$(cat /var/run/secrets/deis/objectstore/creds/secretkey)
   if [[ "$DATABASE_STORAGE" == "s3" ]]; then
+    USE_SSE=$(cat /var/run/secrets/deis/objectstore/creds/use-sse)
     AWS_REGION=$(cat /var/run/secrets/deis/objectstore/creds/region)
+    S3_ENDPOINT=$(cat /var/run/secrets/deis/objectstore/creds/endpoint)
     BUCKET_NAME=$(cat /var/run/secrets/deis/objectstore/creds/database-bucket)
-    # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
-    # regions other than us-standard.
-    # See https://github.com/wal-e/wal-e/issues/167
-    # See https://github.com/boto/boto/issues/2207
-    if [[ "$AWS_REGION" == "us-east-1" ]]; then
-      echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+    if [[ "$S3_ENDPOINT" == "" ]]; then
+      # Convert $AWS_REGION into $WALE_S3_ENDPOINT to avoid "Connection reset by peer" from
+      # regions other than us-standard.
+      # See https://github.com/wal-e/wal-e/issues/167
+      # See https://github.com/boto/boto/issues/2207
+      if [[ "$AWS_REGION" == "us-east-1" ]]; then
+        echo "https+path://s3.amazonaws.com:443" > WALE_S3_ENDPOINT
+      else
+        echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+      fi
     else
-      echo "https+path://s3-${AWS_REGION}.amazonaws.com:443" > WALE_S3_ENDPOINT
+      echo "$S3_ENDPOINT" > S3_ENDPOINT
+      echo "$S3_ENDPOINT" | sed -E -e 's!http(s?)://!http\1+path://!' -e 's!/$!!' > WALE_S3_ENDPOINT
     fi
+    echo $USE_SSE > WALE_S3_SSE
   else
     AWS_REGION="us-east-1"
     BUCKET_NAME="dbwal"
diff --git a/rootfs/patcher-script.d/patch_wal_e_s3.py b/rootfs/patcher-script.d/patch_wal_e_s3.py
index 1b2d4ea..fd248b3 100644
--- a/rootfs/patcher-script.d/patch_wal_e_s3.py
+++ b/rootfs/patcher-script.d/patch_wal_e_s3.py
@@ -7,10 +7,10 @@ def wrap_uri_put_file(creds, uri, fp, content_type=None, conn=None):
     k = s3_util._uri_to_key(creds, uri, conn=conn)
     if content_type is not None:
         k.content_type = content_type
+    encrypt_key = False
     if os.getenv('DATABASE_STORAGE') == 's3':
-        encrypt_key=True
-    else:
-        encrypt_key=False
+        if os.getenv('WALE_S3_SSE', 'false') == 'true':
+            encrypt_key = True
     k.set_contents_from_file(fp, encrypt_key=encrypt_key)
     return k
 s3.uri_put_file = wrap_uri_put_file