Skip to content

Option to allow skipping bucket validation #287

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion mongodb_consistent_backup/Upload/Rsync/Rsync.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def init(self):
def rsync_info(self):
if not self._rsync_info:
output = check_output([self.rsync_binary, "--version"])
search = re.search("^rsync\s+version\s([0-9.-]+)\s+protocol\sversion\s(\d+)", output)
search = re.search(r"^rsync\s+version\s([0-9.-]+)\s+protocol\sversion\s(\d+)", output)
self.rsync_version = search.group(1)
self._rsync_info = {"version": self.rsync_version, "protocol_version": int(search.group(2))}
return self._rsync_info
Expand Down
4 changes: 3 additions & 1 deletion mongodb_consistent_backup/Upload/S3/S3.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ def __init__(self, manager, config, timer, base_dir, backup_dir, **kwargs):
self.chunk_size = self.chunk_size_mb * 1024 * 1024
self.s3_acl = self.config.upload.s3.acl
self.key_prefix = base_dir
self.validate_bucket = not self.config.upload.s3.skip_bucket_validation

self.threads(self.config.upload.threads)
self._pool = None
Expand All @@ -38,7 +39,8 @@ def __init__(self, manager, config, timer, base_dir, backup_dir, **kwargs):
self.threads(),
self.remove_uploaded,
self.chunk_size,
self.s3_acl
self.s3_acl,
validate_bucket=self.validate_bucket
)

def get_key_name(self, file_path):
Expand Down
23 changes: 13 additions & 10 deletions mongodb_consistent_backup/Upload/S3/S3Session.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,15 @@


class S3Session:
def __init__(self, region, access_key, secret_key, bucket_name, secure=True, num_retries=5, socket_timeout=15):
self.region = region
self.access_key = access_key
self.secret_key = secret_key
self.secure = secure
self.num_retries = num_retries
self.socket_timeout = socket_timeout

def __init__(self, region, access_key, secret_key, bucket_name, secure=True, num_retries=5, socket_timeout=15,
**kwargs):
self.region = region
self.access_key = access_key
self.secret_key = secret_key
self.secure = secure
self.num_retries = num_retries
self.socket_timeout = socket_timeout
self.validate_bucket = kwargs.get("validate_bucket")
# monkey patch for bucket_name with dots
# https://github.com/boto/boto/issues/2836
if self.secure and '.' in bucket_name:
Expand Down Expand Up @@ -77,8 +78,10 @@ def connect(self):

def get_bucket(self, bucket_name):
try:
logging.debug("Connecting to AWS S3 Bucket: %s" % bucket_name)
return self._conn.get_bucket(bucket_name)
logging.debug("Connecting to AWS S3 Bucket: %s (%s validation)" % (bucket_name,
"with" if self.validate_bucket
else "without"))
return self._conn.get_bucket(bucket_name, validate=self.validate_bucket)
except boto.exception.S3ResponseError, e:
if self.is_forbidden_error(e):
logging.error("Got forbidden error from AWS S3 for bucket %s! Please check your access/secret key" % bucket_name)
Expand Down
6 changes: 4 additions & 2 deletions mongodb_consistent_backup/Upload/S3/S3UploadPool.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def _reduce_method(m):


class S3UploadPool():
def __init__(self, bucket_name, region, access_key, secret_key, threads=4, remove_uploaded=False, chunk_bytes=50 * 1024 * 1024, key_acl=None):
def __init__(self, bucket_name, region, access_key, secret_key, threads=4, remove_uploaded=False, chunk_bytes=50 * 1024 * 1024, key_acl=None, **kwargs):
self.bucket_name = bucket_name
self.region = region
self.access_key = access_key
Expand All @@ -37,6 +37,7 @@ def __init__(self, bucket_name, region, access_key, secret_key, threads=4, remov
self.remove_uploaded = remove_uploaded
self.chunk_bytes = chunk_bytes
self.key_acl = key_acl
self.validate_bucket = kwargs.get("validate_bucket")

self.multipart_min_bytes = 5242880

Expand All @@ -46,7 +47,8 @@ def __init__(self, bucket_name, region, access_key, secret_key, threads=4, remov
self._pool = Pool(processes=self.threads)

try:
self.s3_conn = S3Session(self.region, self.access_key, self.secret_key, self.bucket_name)
self.s3_conn = S3Session(self.region, self.access_key, self.secret_key, self.bucket_name,
validate_bucket=self.validate_bucket)
self.bucket = self.s3_conn.get_bucket(self.bucket_name)
except Exception, e:
raise OperationError(e)
Expand Down
4 changes: 4 additions & 0 deletions mongodb_consistent_backup/Upload/S3/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ def config(parser):
help="S3 Uploader AWS Secret Key (required for S3 upload)")
parser.add_argument("--upload.s3.bucket_name", dest="upload.s3.bucket_name", type=str,
help="S3 Uploader destination bucket name")
parser.add_argument("--upload.s3.skip_bucket_validation", dest="upload.s3.skip_bucket_validation", default=False,
action="store_true",
help="S3 Upload will check upfront if the bucket exists. Skip this check if bucket "
"permissions don't allow access to the bucket's root. (default: false)")
parser.add_argument("--upload.s3.bucket_prefix", dest="upload.s3.bucket_prefix", type=str,
help="S3 Uploader destination bucket path prefix")
parser.add_argument("--upload.s3.bucket_explicit_key", dest="upload.s3.bucket_explicit_key", type=str,
Expand Down