|
|
|
|
@@ -8,6 +8,9 @@ from urllib.parse import urlparse
|
|
|
|
|
|
|
|
|
|
from cached_property import cached_property_with_ttl
|
|
|
|
|
import requests
|
|
|
|
|
from requests.adapters import HTTPAdapter
|
|
|
|
|
from urllib3.util.retry import Retry
|
|
|
|
|
import seafile
|
|
|
|
|
|
|
|
|
|
from dsc import const
|
|
|
|
|
from dsc.misc import create_dir, hide_password
|
|
|
|
|
@@ -21,13 +24,36 @@ class SeafileClient:
|
|
|
|
|
user: str,
|
|
|
|
|
passwd: str,
|
|
|
|
|
app_dir: str = const.DEFAULT_APP_DIR):
|
|
|
|
|
up = urlparse(requests.get(f"http://{host}").url)
|
|
|
|
|
self.url = f"{up.scheme}://{up.netloc}"
|
|
|
|
|
self.user = user
|
|
|
|
|
self.password = passwd
|
|
|
|
|
self.app_dir = os.path.abspath(app_dir)
|
|
|
|
|
self.rpc = seafile.RpcClient(os.path.join(self.app_dir, 'seafile-data', 'seafile.sock'))
|
|
|
|
|
self.__token = None
|
|
|
|
|
|
|
|
|
|
# determine server URL (assume HTTPS unless explicitly specified)
|
|
|
|
|
if host.startswith('http://') or host.startswith('https://'):
|
|
|
|
|
self.url = host.rstrip('/')
|
|
|
|
|
else:
|
|
|
|
|
self.url = f"https://{host}"
|
|
|
|
|
|
|
|
|
|
# configure session with retry strategy
|
|
|
|
|
# enable urllib3 retry logging at DEBUG level (shows retry attempts)
|
|
|
|
|
urllib3_logger = logging.getLogger("urllib3.connectionpool")
|
|
|
|
|
urllib3_logger.setLevel(logging.DEBUG)
|
|
|
|
|
urllib3_logger.propagate = True
|
|
|
|
|
|
|
|
|
|
self.session = requests.Session()
|
|
|
|
|
retry_strategy = Retry(
|
|
|
|
|
total=30,
|
|
|
|
|
backoff_factor=2,
|
|
|
|
|
backoff_max=60,
|
|
|
|
|
status_forcelist=[500, 502, 503, 504],
|
|
|
|
|
allowed_methods=["GET", "POST"]
|
|
|
|
|
)
|
|
|
|
|
adapter = HTTPAdapter(max_retries=retry_strategy)
|
|
|
|
|
self.session.mount("http://", adapter)
|
|
|
|
|
self.session.mount("https://", adapter)
|
|
|
|
|
|
|
|
|
|
def __str__(self):
    """Human-readable identity of this client: user and server URL."""
    return "SeafileClient({}@{})".format(self.user, self.url)
|
|
|
|
|
|
|
|
|
|
@@ -39,7 +65,7 @@ class SeafileClient:
|
|
|
|
|
if self.__token is None:
|
|
|
|
|
url = f"{self.url}/api2/auth-token/"
|
|
|
|
|
_lg.info("Fetching token: %s", url)
|
|
|
|
|
r = requests.post(url, data={"username": self.user, "password": self.password})
|
|
|
|
|
r = self.session.post(url, data={"username": self.user, "password": self.password})
|
|
|
|
|
if r.status_code != 200:
|
|
|
|
|
raise RuntimeError(f"Can't get token: {r.text}")
|
|
|
|
|
self.__token = r.json()["token"]
|
|
|
|
|
@@ -50,7 +76,7 @@ class SeafileClient:
|
|
|
|
|
url = f"{self.url}/api2/repos/"
|
|
|
|
|
_lg.info("Fetching remote libraries: %s", url)
|
|
|
|
|
auth_header = {"Authorization": f"Token {self.token}"}
|
|
|
|
|
r = requests.get(url, headers=auth_header)
|
|
|
|
|
r = self.session.get(url, headers=auth_header)
|
|
|
|
|
if r.status_code != 200:
|
|
|
|
|
raise RuntimeError(r.text)
|
|
|
|
|
r_libs = {lib["id"]: lib["name"] for lib in r.json()}
|
|
|
|
|
@@ -129,31 +155,84 @@ class SeafileClient:
|
|
|
|
|
)
|
|
|
|
|
subprocess.run(self.__gen_cmd(" ".join(cmd)))
|
|
|
|
|
|
|
|
|
|
def get_status(self):
|
|
|
|
|
cmd = "seaf-cli status"
|
|
|
|
|
_lg.debug("Fetching seafile client status: %s", cmd)
|
|
|
|
|
out = subprocess.check_output(self.__gen_cmd(cmd))
|
|
|
|
|
out = out.decode().splitlines()
|
|
|
|
|
def __print_tx_task(self, tx_task) -> str:
    """Format a transfer task's progress as ' <pct>%, <rate>KB/s'.

    Returns an empty string when no progress can be computed yet
    (i.e. the task reports zero total blocks).
    """
    try:
        done_pct = tx_task.block_done / tx_task.block_total * 100
        rate_kb = tx_task.rate / 1024.0
    except ZeroDivisionError:
        # block_total == 0: transfer has not reported any blocks yet.
        return ""
    return f" {done_pct:.1f}%, {rate_kb:.1f}KB/s"
|
|
|
|
|
|
|
|
|
|
def get_status(self) -> dict:
|
|
|
|
|
""" Get status of all libraries """
|
|
|
|
|
statuses = dict()
|
|
|
|
|
for line in out:
|
|
|
|
|
if line.startswith("#") or not line.strip():
|
|
|
|
|
|
|
|
|
|
# fetch statuses of libraries being cloned
|
|
|
|
|
tasks = self.rpc.get_clone_tasks()
|
|
|
|
|
for clone_task in tasks:
|
|
|
|
|
if clone_task.state == "done":
|
|
|
|
|
continue
|
|
|
|
|
lib, status = line.split(sep="\t", maxsplit=1)
|
|
|
|
|
lib = lib.strip()
|
|
|
|
|
status = " ".join(status.split())
|
|
|
|
|
statuses[lib] = status
|
|
|
|
|
|
|
|
|
|
elif clone_task.state == "fetch":
|
|
|
|
|
statuses[clone_task.repo_name] = "downloading"
|
|
|
|
|
tx_task = self.rpc.find_transfer_task(clone_task.repo_id)
|
|
|
|
|
statuses[clone_task.repo_name] += self.__print_tx_task(tx_task)
|
|
|
|
|
|
|
|
|
|
elif clone_task.state == "error":
|
|
|
|
|
err = self.rpc.sync_error_id_to_str(clone_task.error)
|
|
|
|
|
statuses[clone_task.repo_name] = f"error: {err}"
|
|
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
statuses[clone_task.repo_name] = clone_task.state
|
|
|
|
|
|
|
|
|
|
# fetch statuses of synced libraries
|
|
|
|
|
repos = self.rpc.get_repo_list(-1, -1)
|
|
|
|
|
for repo in repos:
|
|
|
|
|
auto_sync_enabled = self.rpc.is_auto_sync_enabled()
|
|
|
|
|
if not auto_sync_enabled or not repo.auto_sync:
|
|
|
|
|
statuses[repo.name] = "auto sync disabled"
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
sync_task = self.rpc.get_repo_sync_task(repo.id)
|
|
|
|
|
if sync_task is None:
|
|
|
|
|
statuses[repo.name] = "waiting for sync"
|
|
|
|
|
|
|
|
|
|
elif sync_task.state in ("uploading", "downloading"):
|
|
|
|
|
statuses[repo.name] = sync_task.state
|
|
|
|
|
tx_task = self.rpc.find_transfer_task(repo.id)
|
|
|
|
|
|
|
|
|
|
if sync_task.state == "downloading":
|
|
|
|
|
if tx_task.rt_state == "data":
|
|
|
|
|
statuses[repo.name] += " files"
|
|
|
|
|
elif tx_task.rt_state == "fs":
|
|
|
|
|
statuses[repo.name] += " file list"
|
|
|
|
|
|
|
|
|
|
statuses[repo.name] += self.__print_tx_task(tx_task)
|
|
|
|
|
|
|
|
|
|
elif sync_task.state == "error":
|
|
|
|
|
err = self.rpc.sync_error_id_to_str(sync_task.error)
|
|
|
|
|
statuses[repo.name] = f"error: {err}"
|
|
|
|
|
|
|
|
|
|
else:
|
|
|
|
|
statuses[repo.name] = sync_task.state
|
|
|
|
|
|
|
|
|
|
return statuses
|
|
|
|
|
|
|
|
|
|
def watch_status(self):
|
|
|
|
|
prev_status = dict()
|
|
|
|
|
max_name_len = 0
|
|
|
|
|
fmt = "Library {:%ds} {}" % max_name_len
|
|
|
|
|
while True:
|
|
|
|
|
time.sleep(const.STATUS_POLL_PERIOD)
|
|
|
|
|
cur_status = self.get_status()
|
|
|
|
|
for folder, state in cur_status.items():
|
|
|
|
|
if state != prev_status.get(folder):
|
|
|
|
|
logging.info("Library %s:\t%s", folder, state)
|
|
|
|
|
prev_status[folder] = cur_status[folder]
|
|
|
|
|
for library, state in cur_status.items():
|
|
|
|
|
if state != prev_status.get(library):
|
|
|
|
|
if 30 > len(library) > max_name_len:
|
|
|
|
|
max_name_len = len(library)
|
|
|
|
|
fmt = "Library {:%ds} {}" % max_name_len
|
|
|
|
|
logging.info(fmt.format(library, state))
|
|
|
|
|
prev_status[library] = cur_status[library]
|
|
|
|
|
|
|
|
|
|
def get_local_libraries(self) -> set:
|
|
|
|
|
cmd = "seaf-cli list"
|
|
|
|
|
|