Add lock for backups directory

Maks Snegov 2021-11-12 11:30:11 +03:00
parent c64955362a
commit 1cc5271c29
2 changed files with 39 additions and 17 deletions

main.py

@@ -5,6 +5,7 @@ import logging
import os.path
import shutil
import sys
import time
import spqr.curateipsum.backup as backup
@@ -35,6 +36,10 @@ def main():
action="store_true",
default=False,
help="Do not do create backup")
parser.add_argument("-f", "--force",
action="store_true",
default=False,
help="Force run when previous backup is still in process")
parser.add_argument("--external-rsync",
action="store_true",
default=False,
@@ -80,8 +85,13 @@ def main():
_lg.error("Source directory %s does not exist", src_dir)
return 1
backup.cleanup_old_backups(backup_dir=backup_dir_abs, dry_run=args.dry_run)
start_time = time.time()
if not backup.set_backups_lock(backup_dir_abs, args.force):
_lg.warning("Previous backup is still in process, exiting")
return 1
backup.cleanup_old_backups(backup_dir=backup_dir_abs, dry_run=args.dry_run)
backup.initiate_backup(
sources=args.sources,
backup_dir=backup_dir_abs,
@@ -90,6 +100,11 @@ def main():
external_hardlink=args.external_hardlink,
)
backup.release_backups_lock(backup_dir_abs)
end_time = time.time()
spent_time = end_time - start_time
_lg.info("Finished, time spent: %.3fs", spent_time)
if __name__ == "__main__":
sys.exit(main())
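
The lock is taken in main() before cleanup_old_backups() and initiate_backup() run, and released right after them. Below is a minimal sketch of that flow, assuming the spqr.curateipsum package is importable; the run_with_backups_lock helper, the do_backup callable, and the try/finally wrapping are illustrations only, not part of the commit (main() simply calls release_backups_lock() after initiate_backup() returns).

# Sketch only: how the new locking flow fits together, with a try/finally
# added so the lock file is released even if the backup raises.
# run_with_backups_lock and do_backup are illustrative names.
import logging
import time
from typing import Callable

import spqr.curateipsum.backup as backup

_lg = logging.getLogger(__name__)


def run_with_backups_lock(backup_dir_abs: str,
                          do_backup: Callable[[], None],
                          force: bool = False) -> int:
    start_time = time.time()
    if not backup.set_backups_lock(backup_dir_abs, force):
        # Another run still holds .backups_lock; -f/--force removes it.
        _lg.warning("Previous backup is still in progress, exiting")
        return 1
    try:
        do_backup()  # e.g. cleanup_old_backups() followed by initiate_backup()
    finally:
        backup.release_backups_lock(backup_dir_abs)
    _lg.info("Finished, time spent: %.3fs", time.time() - start_time)
    return 0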

spqr/curateipsum/backup.py

@@ -5,14 +5,14 @@ Module with backup functions.
import logging
import os
import shutil
import time
from datetime import datetime, timedelta
from typing import Optional, Iterable
import spqr.curateipsum.fs as fs
BACKUP_ENT_FMT = "%Y%m%d_%H%M"
DELTA_DIR = "_delta"
BACKUP_ENT_FMT = "%Y%m%d_%H%M%S"
LOCK_FILE = ".backups_lock"
DELTA_DIR = ".backup_delta"
_lg = logging.getLogger(__name__)
@@ -55,6 +55,24 @@ def _date_from_backup(backup: os.DirEntry) -> datetime:
return datetime.strptime(backup.name, BACKUP_ENT_FMT)
def set_backups_lock(backup_dir: str, force: bool = False) -> bool:
""" Return false if previous backup is still running. """
lock_file_path = os.path.join(backup_dir, LOCK_FILE)
if os.path.exists(lock_file_path):
if not force:
return False
os.unlink(lock_file_path)
open(lock_file_path, "a").close()
return True
def release_backups_lock(backup_dir: str):
lock_file_path = os.path.join(backup_dir, LOCK_FILE)
if os.path.exists(lock_file_path):
os.unlink(lock_file_path)
def cleanup_old_backups(
backup_dir: str,
dry_run: bool = False,
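
set_backups_lock() in the hunk above checks os.path.exists() and then creates the lock file with open(), so there is a short window in which two processes could both decide the lock is free. If that window ever matters, the file can be created atomically with os.O_CREAT | os.O_EXCL; the sketch below is an alternative illustration, not the commit's implementation.

# Alternative sketch (not the commit's code): atomic lock-file creation.
# os.open() with O_CREAT | O_EXCL raises FileExistsError if the file already
# exists, so only one process can succeed in taking the lock.
import os

LOCK_FILE = ".backups_lock"


def set_backups_lock_atomic(backup_dir: str, force: bool = False) -> bool:
    lock_file_path = os.path.join(backup_dir, LOCK_FILE)
    if force and os.path.exists(lock_file_path):
        os.unlink(lock_file_path)  # drop a stale lock left by a dead run
    try:
        fd = os.open(lock_file_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
    except FileExistsError:
        return False  # another backup is still running
    os.close(fd)
    return True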
@@ -161,8 +179,8 @@ def cleanup_old_backups(
to_remove[backup] = True
for backup, do_delete in to_remove.items():
_lg.info("Removing old backup %s", backup.name)
if not dry_run and do_delete:
_lg.info("Removing old backup %s", backup.name)
shutil.rmtree(backup.path)
@@ -180,8 +198,7 @@ def initiate_backup(sources,
external_hardlink: bool = False):
""" Main backup function """
start_time = time.time()
start_time_fmt = datetime.fromtimestamp(start_time).strftime(BACKUP_ENT_FMT)
start_time_fmt = datetime.now().strftime(BACKUP_ENT_FMT)
cur_backup = fs.PseudoDirEntry(os.path.join(backup_dir, start_time_fmt))
_lg.debug("Current backup dir: %s", cur_backup.path)
@@ -193,12 +210,6 @@ def initiate_backup(sources,
os.mkdir(cur_backup.path)
else:
# TODO check last backup is finalized
if cur_backup.name == latest_backup.name:
_lg.warning("Latest backup %s was created less than minute ago, exiting",
latest_backup.name)
return
_lg.info("Copying data from latest backup %s to current backup %s",
latest_backup.name, cur_backup.name)
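
With %S added to BACKUP_ENT_FMT (first hunk of this file), two backups started within the same minute no longer map to the same directory name, and the new lock already prevents concurrent runs, which appears to be why the "created less than minute ago" guard above could be dropped. A quick round-trip check follows; the timestamp value is only an example.

# Illustration only: with seconds in the format, backup directory names
# created in the same minute stay distinct, and _date_from_backup()-style
# parsing still round-trips.
from datetime import datetime

BACKUP_ENT_FMT = "%Y%m%d_%H%M%S"

stamp = datetime(2021, 11, 12, 11, 30, 11)
name = stamp.strftime(BACKUP_ENT_FMT)
assert name == "20211112_113011"
assert datetime.strptime(name, BACKUP_ENT_FMT) == stamp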
@@ -242,7 +253,3 @@ def initiate_backup(sources,
shutil.rmtree(cur_backup.path, ignore_errors=True)
else:
_lg.info("Backup created: %s", cur_backup.name)
end_time = time.time()
spend_time = end_time - start_time
_lg.info("Finished, time spent: %.3fs", spend_time)