chore(lint): Run black
@@ -18,7 +18,7 @@ def get_file_ext():
         "database": "-database.sql.gz",
         "private_files": "-private-files.tar",
         "public_files": "-files.tar",
-        "site_config": "-site_config_backup.json"
+        "site_config": "-site_config_backup.json",
     }
 
 
@@ -31,19 +31,26 @@ def get_backup_details(sitename):
 
     if os.path.exists(site_backup_path):
         for filetype, ext in file_ext.items():
-            site_slug = sitename.replace('.', '_')
-            pattern = site_backup_path + '*-' + site_slug + ext
+            site_slug = sitename.replace(".", "_")
+            pattern = site_backup_path + "*-" + site_slug + ext
             backup_files = list(filter(os.path.isfile, glob(pattern)))
 
             if len(backup_files) > 0:
-                backup_files.sort(key=lambda file: os.stat(os.path.join(site_backup_path, file)).st_ctime)
-                backup_date = datetime.datetime.strptime(time.ctime(os.path.getmtime(backup_files[0])), "%a %b %d %H:%M:%S %Y")
+                backup_files.sort(
+                    key=lambda file: os.stat(
+                        os.path.join(site_backup_path, file)
+                    ).st_ctime
+                )
+                backup_date = datetime.datetime.strptime(
+                    time.ctime(os.path.getmtime(backup_files[0])),
+                    "%a %b %d %H:%M:%S %Y",
+                )
                 backup_details[filetype] = {
                     "sitename": sitename,
                     "file_size_in_bytes": os.stat(backup_files[-1]).st_size,
                     "file_path": os.path.abspath(backup_files[-1]),
                     "filename": os.path.basename(backup_files[-1]),
-                    "backup_date": backup_date.date().strftime("%Y-%m-%d %H:%M:%S")
+                    "backup_date": backup_date.date().strftime("%Y-%m-%d %H:%M:%S"),
                 }
 
     return backup_details
@@ -54,31 +61,34 @@ def delete_old_backups(limit, bucket, site_name):
     all_backup_dates = list()
     backup_limit = int(limit)
     check_s3_environment_variables()
-    bucket_dir = os.environ.get('BUCKET_DIR')
+    bucket_dir = os.environ.get("BUCKET_DIR")
     oldest_backup_date = None
 
     s3 = boto3.resource(
-        's3',
-        region_name=os.environ.get('REGION'),
-        aws_access_key_id=os.environ.get('ACCESS_KEY_ID'),
-        aws_secret_access_key=os.environ.get('SECRET_ACCESS_KEY'),
-        endpoint_url=os.environ.get('ENDPOINT_URL')
+        "s3",
+        region_name=os.environ.get("REGION"),
+        aws_access_key_id=os.environ.get("ACCESS_KEY_ID"),
+        aws_secret_access_key=os.environ.get("SECRET_ACCESS_KEY"),
+        endpoint_url=os.environ.get("ENDPOINT_URL"),
     )
 
     bucket = s3.Bucket(bucket)
-    objects = bucket.meta.client.list_objects_v2(
-        Bucket=bucket.name,
-        Delimiter='/')
+    objects = bucket.meta.client.list_objects_v2(Bucket=bucket.name, Delimiter="/")
 
     if objects:
-        for obj in objects.get('CommonPrefixes'):
-            if obj.get('Prefix') == bucket_dir + '/':
-                for backup_obj in bucket.objects.filter(Prefix=obj.get('Prefix')):
+        for obj in objects.get("CommonPrefixes"):
+            if obj.get("Prefix") == bucket_dir + "/":
+                for backup_obj in bucket.objects.filter(Prefix=obj.get("Prefix")):
                     if backup_obj.get()["ContentType"] == "application/x-directory":
                         continue
                     try:
                         # backup_obj.key is bucket_dir/site/date_time/backupfile.extension
-                        bucket_dir, site_slug, date_time, backupfile = backup_obj.key.split('/')
+                        (
+                            bucket_dir,
+                            site_slug,
+                            date_time,
+                            backupfile,
+                        ) = backup_obj.key.split("/")
                         date_time_object = datetime.datetime.strptime(
                             date_time, DATE_FORMAT
                         )
@@ -98,7 +108,7 @@ def delete_old_backups(limit, bucket, site_name):
         for backup in all_backups:
             try:
                 # backup is bucket_dir/site/date_time/backupfile.extension
-                backup_dir, site_slug, backup_dt_string, filename = backup.split('/')
+                backup_dir, site_slug, backup_dt_string, filename = backup.split("/")
                 backup_datetime = datetime.datetime.strptime(
                     backup_dt_string, DATE_FORMAT
                 )
@@ -113,7 +123,7 @@ def delete_old_backups(limit, bucket, site_name):
         for obj in bucket.objects.filter(Prefix=oldest_backup):
             # delete all keys that are inside the oldest_backup
             if bucket_dir in obj.key:
-                print('Deleteing ' + obj.key)
+                print("Deleteing " + obj.key)
                 s3.Object(bucket.name, obj.key).delete()
 
 
@@ -124,31 +134,52 @@ def main():
 
     for site in sites:
         details = get_backup_details(site)
-        db_file = details.get('database', {}).get('file_path')
-        folder = os.environ.get('BUCKET_DIR') + '/' + site + '/'
+        db_file = details.get("database", {}).get("file_path")
+        folder = os.environ.get("BUCKET_DIR") + "/" + site + "/"
         if db_file:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(db_file)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(db_file)[:15]
+                + "/"
+            )
             upload_file_to_s3(db_file, folder, conn, bucket)
 
         # Archive site_config.json
-        site_config_file = details.get('site_config', {}).get('file_path')
+        site_config_file = details.get("site_config", {}).get("file_path")
         if not site_config_file:
-            site_config_file = os.path.join(os.getcwd(), site, 'site_config.json')
+            site_config_file = os.path.join(os.getcwd(), site, "site_config.json")
         upload_file_to_s3(site_config_file, folder, conn, bucket)
 
-        public_files = details.get('public_files', {}).get('file_path')
+        public_files = details.get("public_files", {}).get("file_path")
         if public_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(public_files)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(public_files)[:15]
+                + "/"
+            )
             upload_file_to_s3(public_files, folder, conn, bucket)
 
-        private_files = details.get('private_files', {}).get('file_path')
+        private_files = details.get("private_files", {}).get("file_path")
         if private_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(private_files)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(private_files)[:15]
+                + "/"
+            )
             upload_file_to_s3(private_files, folder, conn, bucket)
 
-        delete_old_backups(os.environ.get('BACKUP_LIMIT', '3'), bucket, site)
+        delete_old_backups(os.environ.get("BACKUP_LIMIT", "3"), bucket, site)
 
-    print('push-backup complete')
+    print("push-backup complete")
     exit(0)
 
 
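The wrapped folder expressions in main() are formatting-only changes. A minimal sketch, assuming made-up values for BUCKET_DIR, site, and db_file (none of them taken from the repository), shows that the black-wrapped form builds the same S3 prefix as the original one-liner:

    import os

    # Illustrative values only -- not taken from the repository or the commit.
    os.environ["BUCKET_DIR"] = "backups"
    site = "example.com"
    db_file = "/backups/20240101_120000-example_com-database.sql.gz"

    # Original single-line form (quotes already normalized for comparison)
    folder_before = os.environ.get("BUCKET_DIR") + "/" + site + "/" + os.path.basename(db_file)[:15] + "/"

    # Black-wrapped form as it appears in the diff
    folder_after = (
        os.environ.get("BUCKET_DIR")
        + "/"
        + site
        + "/"
        + os.path.basename(db_file)[:15]
        + "/"
    )

    assert folder_before == folder_after
    print(folder_after)  # backups/example.com/20240101_120000/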