Minor fixes
parent 84546243cb
commit a927f3e2d3
@@ -8,7 +8,7 @@ import shutil
 from datetime import datetime, timedelta
 from typing import Optional, Iterable
 
-import curateipsum.fs as fs
+from curateipsum import fs
 
 BACKUP_ENT_FMT = "%Y%m%d_%H%M%S"
 LOCK_FILE = ".backups_lock"
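Note on the import change above: both spellings bind the curateipsum.fs submodule to the local name fs, so no call sites need to change; the from-import form is simply the more common idiom. A minimal sketch of the equivalence:

```python
# Both forms make the submodule available under the local name "fs".
import curateipsum.fs as fs    # old spelling
from curateipsum import fs     # new spelling, equivalent binding
```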
@@ -103,7 +103,7 @@ def cleanup_old_backups(
     if not all_backups:
         _lg.debug("No backups, exiting")
         return
-    elif len(all_backups) == 1:
+    if len(all_backups) == 1:
         _lg.debug("Only one backup (%s) exists, will not remove it",
                   all_backups[0].name)
         return
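Because the `if not all_backups:` branch returns early, the following check no longer needs to be an `elif`; the plain `if` reads as an independent guard clause and behaves identically. A minimal sketch of the pattern, with hypothetical names:

```python
def pick_backup(all_backups):
    # Each early return makes the next check independent,
    # so chaining with elif adds nothing.
    if not all_backups:
        return None
    if len(all_backups) == 1:
        return all_backups[0]
    return all_backups[-1]
```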
@@ -186,7 +186,7 @@ def cleanup_old_backups(
 
 def process_backed_entry(backup_dir: str, entry_relpath: str, action: fs.Actions):
     _lg.debug("%s %s", action, entry_relpath)
-    if action is not fs.Actions.delete:
+    if action is not fs.Actions.DELETE:
         fs.nest_hardlink(src_dir=backup_dir, src_relpath=entry_relpath,
                          dst_dir=os.path.join(backup_dir, DELTA_DIR))
 
@@ -14,13 +14,13 @@ _lg = logging.getLogger(__name__)
 
 
 class Actions(enum.Enum):
-    nothing = enum.auto()
-    delete = enum.auto()
-    rewrite = enum.auto()
-    update_time = enum.auto()
-    update_perm = enum.auto()
-    update_owner = enum.auto()
-    create = enum.auto()
+    NOTHING = enum.auto()
+    DELETE = enum.auto()
+    REWRITE = enum.auto()
+    UPDATE_TIME = enum.auto()
+    UPDATE_PERM = enum.auto()
+    UPDATE_OWNER = enum.auto()
+    CREATE = enum.auto()
 
 
 class PseudoDirEntry:
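The enum members are renamed to upper case, matching the constant-style naming used in the stdlib enum documentation; enum.auto() still assigns the values, so only the attribute names change, and every caller in this commit is updated to match. A minimal usage sketch:

```python
from curateipsum import fs

# Only the member names changed; identity comparisons work as before.
action = fs.Actions.DELETE
if action is fs.Actions.DELETE:
    print("entry scheduled for removal from the backup")
```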
@@ -48,24 +48,24 @@ def _parse_rsync_output(line: str) -> Tuple[str, Actions]:
     action = None
     change_string, relpath = line.split(' ', maxsplit=1)
     if change_string == "*deleting":
-        return relpath, Actions.delete
+        return relpath, Actions.DELETE
 
     update_type = change_string[0]
     entity_type = change_string[1]
     change_type = change_string[2:]
 
     if update_type == "c" and entity_type in {"d", "L"} and "+" in change_type:
-        action = Actions.create
+        action = Actions.CREATE
     elif update_type == ">" and entity_type == "f" and "+" in change_type:
-        action = Actions.create
+        action = Actions.CREATE
     elif entity_type == "f" and ("s" in change_type or "t" in change_type):
-        action = Actions.rewrite
+        action = Actions.REWRITE
     elif entity_type == "d" and "t" in change_type:
-        action = Actions.update_time
+        action = Actions.UPDATE_TIME
     elif "p" in change_type:
-        action = Actions.update_perm
+        action = Actions.UPDATE_PERM
     elif "o" in change_type or "g" in change_type:
-        action = Actions.update_owner
+        action = Actions.UPDATE_OWNER
 
     if action is None:
         raise RuntimeError("Not parsed string: %s" % line)
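_parse_rsync_output maps one line of rsync --itemize-changes output to a (relative path, Actions) pair. A rough sketch of the mapping, assuming a single space between the change string and the path as the split(' ', maxsplit=1) implies (real rsync output may pad with more spaces, which this split would keep in the path):

```python
from curateipsum.fs import Actions, _parse_rsync_output

# Hypothetical itemize-changes lines, for illustration only.
assert _parse_rsync_output(">f+++++++++ docs/new.txt") == ("docs/new.txt", Actions.CREATE)
assert _parse_rsync_output("*deleting old.txt") == ("old.txt", Actions.DELETE)
assert _parse_rsync_output(">f.st...... report.csv") == ("report.csv", Actions.REWRITE)
```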
@@ -123,7 +123,6 @@ def rsync_ext(src, dst, dry_run=False):
 def scantree(path, dir_first=True) -> Iterable[os.DirEntry]:
     """Recursively yield DirEntry file objects for given directory."""
     entry: os.DirEntry
-    """Recursively yield DirEntry objects for given directory."""
     with os.scandir(path) as scan_it:
         for entry in scan_it:
             if entry.is_dir(follow_symlinks=False):
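The hunk above drops a duplicate docstring left inside the body of scantree, which recursively yields os.DirEntry objects (directories first by default). A minimal usage sketch with a hypothetical path:

```python
from curateipsum import fs

# Walk a tree and print the size of each regular file, skipping symlinks.
for entry in fs.scantree("/tmp/example_dir"):
    if entry.is_file(follow_symlinks=False):
        print(entry.path, entry.stat(follow_symlinks=False).st_size)
```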
@@ -281,7 +280,7 @@ def rsync(src_dir, dst_dir, dry_run=False) -> Iterable[tuple]:
         if src_entry is None:
             _lg.debug("Rsync, deleting: %s", rel_path)
             rm_direntry(dst_entry)
-            yield rel_path, Actions.delete
+            yield rel_path, Actions.DELETE
             continue
 
         # mark src entry as taken for processing
@@ -294,28 +293,28 @@ def rsync(src_dir, dst_dir, dry_run=False) -> Iterable[tuple]:
                 _lg.debug("Rsync, rewriting (src is a file, dst is not a file): %s",
                           rel_path)
                 update_direntry(src_entry, dst_entry)
-                yield rel_path, Actions.rewrite
+                yield rel_path, Actions.REWRITE
                 continue
         if src_entry.is_dir(follow_symlinks=False):
             if not dst_entry.is_dir(follow_symlinks=False):
                 _lg.debug("Rsync, rewriting (src is a dir, dst is not a dir): %s",
                           rel_path)
                 update_direntry(src_entry, dst_entry)
-                yield rel_path, Actions.rewrite
+                yield rel_path, Actions.REWRITE
                 continue
         if src_entry.is_symlink():
             if not dst_entry.is_symlink():
                 _lg.debug("Rsync, rewriting (src is a symlink, dst is not a symlink): %s",
                           rel_path)
                 update_direntry(src_entry, dst_entry)
-                yield rel_path, Actions.rewrite
+                yield rel_path, Actions.REWRITE
                 continue
 
         # rewrite dst if it is hard link to src (bad for backups)
         if src_entry.inode() == dst_entry.inode():
             _lg.debug("Rsync, rewriting (different inodes): %s", rel_path)
             update_direntry(src_entry, dst_entry)
-            yield rel_path, Actions.rewrite
+            yield rel_path, Actions.REWRITE
             continue
 
         src_stat = src_entry.stat(follow_symlinks=False)
@@ -329,7 +328,7 @@ def rsync(src_dir, dst_dir, dry_run=False) -> Iterable[tuple]:
             reason = "size" if not same_size else "time"
             _lg.debug("Rsync, rewriting (different %s): %s", reason, rel_path)
             update_direntry(src_entry, dst_entry)
-            yield rel_path, Actions.rewrite
+            yield rel_path, Actions.REWRITE
             continue
 
         # rewrite dst symlink if it points somewhere else than src
@@ -342,12 +341,12 @@ def rsync(src_dir, dst_dir, dry_run=False) -> Iterable[tuple]:
         # update permissions and ownership
         if src_stat.st_mode != dst_stat.st_mode:
             _lg.debug("Rsync, updating permissions: %s", rel_path)
-            yield rel_path, Actions.update_perm
+            yield rel_path, Actions.UPDATE_PERM
             os.chmod(dst_entry.path, dst_stat.st_mode)
 
         if src_stat.st_uid != dst_stat.st_uid or src_stat.st_gid != dst_stat.st_gid:
             _lg.debug("Rsync, updating owners: %s", rel_path)
-            yield rel_path, Actions.update_owner
+            yield rel_path, Actions.UPDATE_OWNER
             os.chown(dst_entry.path, src_stat.st_uid, src_stat.st_gid)
 
     # process remained source entries
@@ -355,7 +354,7 @@ def rsync(src_dir, dst_dir, dry_run=False) -> Iterable[tuple]:
         dst_path = os.path.join(dst_root_abs, rel_path)
         _lg.debug("Rsync, creating: %s", rel_path)
         copy_direntry(src_entry, dst_path)
-        yield rel_path, Actions.create
+        yield rel_path, Actions.CREATE
 
     # restore dir mtimes in dst, updated by updating files
     for src_entry in scantree(src_root_abs, dir_first=True):
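rsync here is a generator: it synchronizes dst_dir with src_dir and yields a (rel_path, Actions) tuple for every entry it touches. A sketch of how a caller such as process_backed_entry might consume it; the wiring and the module name curateipsum.backup are assumptions, since the call site is not part of this diff:

```python
from curateipsum import fs
from curateipsum.backup import process_backed_entry  # assumed module path

backup_dir = "/backups/20240101_000000"  # hypothetical paths
for rel_path, action in fs.rsync("/home/user/data", backup_dir):
    process_backed_entry(backup_dir, rel_path, action)
```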
@@ -7,7 +7,7 @@ import shutil
 import sys
 import time
 
-import curateipsum.backup as backup
+from curateipsum import backup
 
 _lg = logging.getLogger("spqr.curateipsum")
 SUPPORTED_PLATFORMS = ("linux", "darwin")
@@ -21,7 +21,7 @@ def main():
     parser = argparse.ArgumentParser(
         prog="cura-te-ipsum", description="cura-te-ipsum, my personal backup software.",
     )
-    parser.add_argument("-V", "--version", action="version", version="%(prog)s 0.1")
+    parser.add_argument("-V", "--version", action="version", version="%(prog)s 0.0.1")
     parser.add_argument("-v", "--verbose",
                         action="store_true",
                         default=False,
@@ -105,6 +105,8 @@ def main():
     spent_time = end_time - start_time
     _lg.info("Finished, time spent: %.3fs", spent_time)
 
+    return 0
+
 
 if __name__ == "__main__":
     sys.exit(main())
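main() now ends with an explicit return 0, so sys.exit(main()) exits with status 0 on success. Previously main returned None, which sys.exit also treats as success, so behavior is unchanged; the explicit value just documents the intent and leaves room for non-zero error codes later. The convention in a nutshell:

```python
import sys

def main() -> int:
    # ... perform the backup run ...
    return 0  # explicit success status

if __name__ == "__main__":
    sys.exit(main())  # the process exit code is main()'s return value
```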
setup.py
@@ -5,7 +5,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
 
 setuptools.setup(
     name="cura-te-ipsum",
-    version="0.0.1.dev6",
+    version="0.0.1.dev7",
     author="Maks Snegov",
     author_email="snegov@spqr.link",
     description="Backup utility",