2021-11-12 02:53:21 +03:00
|
|
|
import os
|
|
|
|
|
import random
|
|
|
|
|
import string
|
|
|
|
|
import tempfile
|
|
|
|
|
from unittest import TestCase, mock
|
|
|
|
|
from datetime import datetime
|
|
|
|
|
|
2021-11-15 00:58:59 +03:00
|
|
|
from curateipsum import backup as bk, fs
|
2021-11-12 02:53:21 +03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestBackupCleanup(TestCase):
    """Tests for the retention logic of bk.cleanup_old_backups."""

    def setUp(self) -> None:
        # Fresh temporary backups root per test.
        self.backup_dir = tempfile.TemporaryDirectory(prefix="backup_")

    def tearDown(self) -> None:
        self.backup_dir.cleanup()

    def _add_backup(self, backup_name: str) -> fs.PseudoDirEntry:
        """Create a marked backup directory holding one small random file."""
        entry = fs.PseudoDirEntry(os.path.join(self.backup_dir.name, backup_name))
        os.mkdir(entry.path)
        bk.set_backup_marker(entry)
        fd, _unused_path = tempfile.mkstemp(prefix="backup_file_", dir=entry.path)
        with open(fd, "w") as fobj:
            fobj.write(''.join(random.choices(string.printable, k=128)))
        return entry

    @staticmethod
    def _check_backup_not_empty(backup: fs.PseudoDirEntry) -> bool:
        """Return True when the backup directory has at least one entry."""
        contents = os.listdir(backup.path)
        return len(contents) > 0

    def _check_backups(self, expected_backups):
        """Assert exactly *expected_backups* remain, each non-empty."""
        remaining = sorted(os.listdir(self.backup_dir.name))
        expected_names = sorted(entry.name for entry in expected_backups)
        self.assertEqual(expected_names, remaining)
        for entry in expected_backups:
            self.assertTrue(self._check_backup_not_empty(entry))

    def _run_cleanup(self, **overrides):
        """Run cleanup_old_backups with all thresholds off unless overridden."""
        params = dict(
            backups_dir=self.backup_dir.name,
            dry_run=False,
            keep_all=None,
            keep_daily=None,
            keep_weekly=None,
            keep_monthly=None,
            keep_yearly=None,
        )
        params.update(overrides)
        bk.cleanup_old_backups(**params)

    def test_no_backups(self):
        """ Test behaviour with no available backups """
        bk.cleanup_old_backups(self.backup_dir.name)
        self.assertEqual([], os.listdir(self.backup_dir.name))

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_only_one_backup(self, mock_datetime):
        """ Test the only backup will not be removed in any case """
        mock_datetime.now.return_value = datetime(2021, 10, 20)
        # very old backup
        ancient = self._add_backup("20010101_0000")
        self._run_cleanup(keep_all=1)
        self._check_backups([ancient])

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_at_least_one_should_be_left(self, mock_datetime):
        """ Test at least one backup should be left """
        mock_datetime.now.return_value = datetime(2021, 10, 20)
        created = [self._add_backup(name) for name in (
            "20211103_0300",  # latest -> must survive
            "20201216_0100",  # everything else is removed
            "20200716_0100",
            "20181116_0100",
        )]
        self._run_cleanup()
        self._check_backups(created[:1])

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_all_threshold_only(self, mock_datetime):
        """ Test threshold for keeping all backups """
        mock_datetime.now.return_value = datetime(2021, 10, 20)
        created = [self._add_backup(name) for name in (
            "20211019_0300",  # keep
            "20211017_0100",  # keep
            "20211016_2300",  # remove: older than 3 days
        )]
        self._run_cleanup(keep_all=3)
        self._check_backups(created[:2])

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_daily_threshold_only(self, mock_datetime):
        """ Test threshold for keeping daily backups """
        mock_datetime.now.return_value = datetime(2021, 10, 20)
        created = [self._add_backup(name) for name in (
            "20211019_0300",  # keep: first daily backup on 2021-10-19
            "20211017_2100",  # remove: not the day's first backup
            "20211017_0100",  # remove: not the day's first backup
            "20211017_0030",  # keep: first daily backup on 2021-10-17
            "20211016_2300",  # remove: older than 3 days
            "20211016_0100",  # remove: older than 3 days
        )]
        self._run_cleanup(keep_daily=3)
        self._check_backups([created[0], created[3]])

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_all_and_daily_thresholds(self, mock_datetime):
        """ Test threshold for keeping all and daily backups """
        mock_datetime.now.return_value = datetime(2021, 10, 20)
        created = [self._add_backup(name) for name in (
            "20211019_0300",  # keep: newer than 3 days
            "20211017_0200",  # keep: newer than 3 days
            "20211017_0100",  # keep: newer than 3 days
            "20211016_2300",  # remove: not the day's first backup
            "20211016_2200",  # keep: first daily backup on 2021-10-16
            "20211015_2200",  # remove: not the day's first backup
            "20211015_1500",  # remove: not the day's first backup
            "20211015_0200",  # keep: first daily backup on 2021-10-15
            "20211014_2200",  # remove: older than 5 days
            "20211014_2000",  # remove: older than 5 days
            "20211014_1232",  # remove: older than 5 days
        )]
        survivors = created[:3] + [created[4], created[7]]
        self._run_cleanup(keep_all=3, keep_daily=5)
        self._check_backups(survivors)

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_weekly_threshold_only(self, mock_datetime):
        """ Test threshold for keeping weekly backups """
        mock_datetime.now.return_value = datetime(2021, 11, 11)
        created = [self._add_backup(name) for name in (
            "20211111_0300",  # remove: not the week's first backup (Thursday)
            "20211110_0300",  # remove: not the week's first backup (Wednesday)
            "20211108_0100",  # keep: first weekly backup at 2021-11-08 (Monday)
            "20211107_2300",  # remove: not the week's first backup (Sunday)
            "20211107_0100",  # keep: first weekly backup at 2021-11-07 (Sunday)
            "20211031_0100",  # remove: not the week's first backup (Sunday)
            "20211025_0100",  # keep: first weekly backup at 2021-10-25 (Monday)
            "20211024_0100",  # remove: not the week's first backup (Sunday)
            "20211023_0100",  # remove: not the week's first backup (Saturday)
            "20211022_0100",  # keep: first weekly backup at 2021-10-22 (Friday)
            "20211008_0100",  # remove: not the week's first backup (Friday)
            "20211007_0100",  # remove: not the week's first backup (Thursday)
            "20211004_0100",  # keep: first weekly backup at 2021-10-04 (Monday)
            "20211003_0100",  # remove: older than 5 weeks
            "20211002_0100",  # remove: older than 5 weeks
        )]
        survivors = [created[idx] for idx in (2, 4, 6, 9, 12)]
        self._run_cleanup(keep_weekly=5)
        self._check_backups(survivors)

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_weekly_threshold_inclusive(self, mock_datetime):
        """ Test threshold for keeping weekly backups """
        mock_datetime.now.return_value = datetime(2021, 11, 11)
        created = [self._add_backup(name) for name in (
            "20211111_0300",  # remove: not the week's first backup (Thursday)
            "20211110_0300",  # keep: first weekly backup (Wednesday)
            "20211107_0100",  # remove: not the week's first backup (Sunday)
            "20211102_0100",  # keep: first weekly backup (Tuesday)
        )]
        self._run_cleanup(keep_weekly=5)
        self._check_backups([created[1], created[3]])

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_monthly_threshold_only(self, mock_datetime):
        """ Test threshold for keeping monthly backups """
        mock_datetime.now.return_value = datetime(2021, 11, 11)
        created = [self._add_backup(name) for name in (
            "20211103_0300",  # keep: first monthly backup in 2021-11
            "20211019_0300",  # remove: not the month's first backup
            "20211017_2100",  # remove: not the month's first backup
            "20211017_0100",  # keep: first monthly backup in 2021-10
            "20210916_2300",  # remove: not the month's first backup
            "20210916_0100",  # keep: first monthly backup in 2021-09
            "20210816_0100",  # remove: not the month's first backup
            "20210810_0000",  # keep: first monthly backup in 2021-08
            "20210716_0100",  # remove: older than 3 months
            "20210715_0100",  # remove: older than 3 months
        )]
        survivors = [created[idx] for idx in (0, 3, 5, 7)]
        self._run_cleanup(keep_monthly=3)
        self._check_backups(survivors)

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_keep_yearly_threshold_only(self, mock_datetime):
        """ Test threshold for keeping yearly backups """
        mock_datetime.now.return_value = datetime(2021, 11, 11)
        created = [self._add_backup(name) for name in (
            "20211103_0300",  # remove: not the year's first backup in 2021
            "20210810_0000",  # remove: not the year's first backup in 2021
            "20210716_0100",  # keep: first yearly backup in 2021
            "20201216_0100",  # remove: not the year's first backup in 2020
            "20200716_0100",  # keep: first yearly backup in 2020
            "20191216_0100",  # remove: not the year's first backup in 2019
            "20190316_0100",  # keep: first yearly backup in 2019
            "20181216_0100",  # remove: not the year's first backup in 2018
            "20181116_0100",  # keep: first yearly backup in 2018
            "20171116_0100",  # remove: older than 3 years
            "20171115_0100",  # remove: older than 3 years
        )]
        survivors = [created[idx] for idx in (2, 4, 6, 8)]
        self._run_cleanup(keep_yearly=3)
        self._check_backups(survivors)

    @mock.patch(f"{bk.__name__}.datetime", wraps=datetime)
    def test_dry_run(self, mock_datetime):
        """ Test dry run does not remove anything """
        mock_datetime.now.return_value = datetime(2021, 11, 11)
        created = [self._add_backup(name) for name in (
            "20211103_0300",
            "20210810_0000",
            "20210716_0100",
            "20200716_0100",
            "20181116_0100",
        )]
        # Thresholds would normally prune, but dry_run must be a no-op.
        self._run_cleanup(keep_all=2, dry_run=True)
        self._check_backups(created)
|
2023-05-06 23:06:15 -07:00
|
|
|
|
|
|
|
|
|
Fix high-priority bugs and add comprehensive test coverage
This commit addresses 8 high-priority issues identified in code analysis.
Fixes #3
Fixes #4
Fixes #5
Fixes #7
Fixes #10
Fixes #19
Fixes #20
Fixes #21
## Critical Bug Fixes
1. **Race condition in lock file creation (#3)**
- Changed to atomic file creation using os.O_CREAT | os.O_EXCL
- Prevents two processes from both acquiring the lock
- Location: curateipsum/backup.py:110-115
2. **Invalid lock file error handling (#4)**
- Added try/except for corrupted/empty lock files
- Gracefully removes corrupted locks and retries
- Location: curateipsum/backup.py:121-133
3. **SIGKILL vs SIGTERM issue (#5)**
- Now sends SIGTERM first for graceful shutdown
- Waits 5 seconds before escalating to SIGKILL
- Allows previous process to clean up resources
- Location: curateipsum/backup.py:146-156
4. **Wrong stat object for permissions (#7)**
- Fixed bug where dst_stat was used instead of src_stat
- Permissions are now correctly updated during rsync
- Location: curateipsum/fs.py:371
5. **os.chown() fails for non-root users (#10)**
- Wrapped all os.chown() calls in try/except blocks
- Logs debug message instead of crashing
- Allows backups to succeed for non-root users
- Locations: curateipsum/fs.py:217-221, 228-231, 383-387, 469-472
## Comprehensive Test Coverage
6. **Lock file tests (#19)**
- Added TestBackupLock class with 7 test cases
- Tests: creation, concurrent prevention, stale locks, corruption
- Location: tests/test_backups.py:228-330
7. **Filesystem operation tests (#20)**
- Added tests/test_fs_extended.py with 6 test classes
- Tests: copy_file, copy_direntry, rsync, hardlink_dir, scantree, rm_direntry
- 20+ test cases covering normal and edge cases
- Location: tests/test_fs_extended.py
8. **Integration tests (#21)**
- Added tests/test_integration.py with 2 test classes
- Tests full backup workflow end-to-end
- Tests: incremental backups, hardlinks, delta dirs, cleanup, recovery
- 14 test cases covering complete backup lifecycle
- Location: tests/test_integration.py
## Test Results
All 68 tests pass successfully:
- 11 original backup cleanup tests
- 7 new lock file tests
- 16 original fs tests
- 20 new fs extended tests
- 14 new integration tests
## Impact
These fixes address critical bugs that could cause:
- Data corruption from concurrent backups
- Incomplete cleanup from forced process termination
- Permission sync failures
- Tool unusability for non-root users
The comprehensive test coverage ensures these bugs are caught early
and provides confidence for future refactoring.
2025-11-15 04:34:41 +00:00
|
|
|
class TestBackupLock(TestCase):
    """Test suite for backup lock file functionality."""

    def setUp(self) -> None:
        self.backup_dir = tempfile.TemporaryDirectory(prefix="backup_lock_")

    def tearDown(self) -> None:
        self.backup_dir.cleanup()

    def _lock_path(self) -> str:
        """Full path of the lock file inside the temporary backups dir."""
        return os.path.join(self.backup_dir.name, bk.LOCK_FILE)

    def _read_lock_pid(self) -> int:
        """PID currently recorded in the lock file."""
        with open(self._lock_path(), "r") as fobj:
            return int(fobj.read().strip())

    def test_lock_creation(self):
        """Test that lock file is created with current PID"""
        self.assertTrue(bk.set_backups_lock(self.backup_dir.name))
        self.assertTrue(os.path.exists(self._lock_path()))
        self.assertEqual(os.getpid(), self._read_lock_pid())

    def test_lock_prevents_concurrent_backup(self):
        """Test that second lock acquisition is blocked"""
        # First acquisition must succeed.
        self.assertTrue(bk.set_backups_lock(self.backup_dir.name))
        # Rewrite the lock with our own (live) PID to emulate another
        # running process holding it.
        with open(self._lock_path(), "w") as fobj:
            fobj.write(str(os.getpid()))
        # Non-forced re-acquisition must be refused.
        self.assertFalse(bk.set_backups_lock(self.backup_dir.name, force=False))

    def test_stale_lock_is_removed(self):
        """Test that lock from non-existent process is cleaned up"""
        # Plant a lock naming a PID that should not exist.
        with open(self._lock_path(), "w") as fobj:
            fobj.write("999999")
        # Acquisition succeeds by discarding the stale lock ...
        self.assertTrue(bk.set_backups_lock(self.backup_dir.name))
        # ... and the replacement names the current process.
        self.assertEqual(os.getpid(), self._read_lock_pid())

    def test_corrupted_lock_is_handled(self):
        """Test that corrupted lock file is handled gracefully"""
        # Plant non-numeric lock content.
        with open(self._lock_path(), "w") as fobj:
            fobj.write("not a number")
        self.assertTrue(bk.set_backups_lock(self.backup_dir.name))
        self.assertEqual(os.getpid(), self._read_lock_pid())

    def test_empty_lock_is_handled(self):
        """Test that empty lock file is handled gracefully"""
        # Plant a zero-byte lock file.
        open(self._lock_path(), "w").close()
        self.assertTrue(bk.set_backups_lock(self.backup_dir.name))
        self.assertEqual(os.getpid(), self._read_lock_pid())

    def test_lock_release(self):
        """Test that lock file is properly released"""
        bk.set_backups_lock(self.backup_dir.name)
        self.assertTrue(os.path.exists(self._lock_path()))
        bk.release_backups_lock(self.backup_dir.name)
        self.assertFalse(os.path.exists(self._lock_path()))

    def test_release_nonexistent_lock(self):
        """Test that releasing non-existent lock doesn't raise error"""
        # Should not raise any exception.
        bk.release_backups_lock(self.backup_dir.name)
        self.assertFalse(os.path.exists(self._lock_path()))
|
|
|
|
|
|
|
|
|
|
|
2023-05-06 23:06:15 -07:00
|
|
|
# TODO add tests for iterating over backups (marker, dirname)
|