Compare commits
4 Commits
f67abdcc91
...
7a7c4ccaee
| Author | SHA1 | Date | |
|---|---|---|---|
| 7a7c4ccaee | |||
| 27d7da17b2 | |||
| 9a6d44ca21 | |||
| bc1e2f5662 |
@@ -9,9 +9,11 @@ from .commands.doctor import run_doctor
|
||||
from .commands.init_host import run_init_host
|
||||
from .commands.install import run_install
|
||||
from .commands.list_remotes import run_list_remotes
|
||||
from .commands.show_config import run_show_config, dump_yaml
|
||||
from .commands.run_scheduled import run_scheduled
|
||||
from .errors import ConfigError, DoctorError, InstallError, PobsyncError, LockError
|
||||
from .commands.show_config import dump_yaml, run_show_config
|
||||
from .commands.snapshots_list import run_snapshots_list
|
||||
from .commands.snapshots_show import run_snapshots_show
|
||||
from .errors import ConfigError, DoctorError, InstallError, LockError, PobsyncError
|
||||
from .paths import PobsyncPaths
|
||||
from .util import is_tty, to_json_safe
|
||||
|
||||
@@ -67,14 +69,28 @@ def build_parser() -> argparse.ArgumentParser:
|
||||
rp.add_argument("--dry-run", action="store_true", help="Run rsync --dry-run without creating directories")
|
||||
rp.set_defaults(_handler=cmd_run_scheduled)
|
||||
|
||||
# snapshots
|
||||
sn = sub.add_parser("snapshots", help="Inspect snapshots (list/show)")
|
||||
sn_sub = sn.add_subparsers(dest="snapshots_cmd", required=True)
|
||||
|
||||
sn_list = sn_sub.add_parser("list", help="List snapshots for a host")
|
||||
sn_list.add_argument("host", help="Host name")
|
||||
sn_list.add_argument("--kind", default="all", help="scheduled|manual|incomplete|all (default: all)")
|
||||
sn_list.add_argument("--limit", type=int, default=20, help="Max results (default: 20)")
|
||||
sn_list.add_argument("--include-incomplete", action="store_true", help="Include .incomplete when --kind=all (default: false)")
|
||||
sn_list.set_defaults(_handler=cmd_snapshots_list)
|
||||
|
||||
sn_show = sn_sub.add_parser("show", help="Show snapshot metadata")
|
||||
sn_show.add_argument("host", help="Host name")
|
||||
sn_show.add_argument("--kind", required=True, help="scheduled|manual|incomplete")
|
||||
sn_show.add_argument("dirname", help="Snapshot directory name (e.g. 20260202-223807Z__K3VQEVH7)")
|
||||
sn_show.add_argument("--tail", type=int, default=None, help="Show last N lines of rsync.log")
|
||||
sn_show.set_defaults(_handler=cmd_snapshots_show)
|
||||
|
||||
return p
|
||||
|
||||
|
||||
def parse_retention(s: str) -> dict[str, int]:
|
||||
"""
|
||||
Parse format: daily=14,weekly=8,monthly=12,yearly=0
|
||||
"""
|
||||
out: dict[str, int] = {}
|
||||
parts = [p.strip() for p in s.split(",") if p.strip()]
|
||||
for part in parts:
|
||||
@@ -89,7 +105,6 @@ def parse_retention(s: str) -> dict[str, int]:
|
||||
if n < 0:
|
||||
raise ValueError(f"Retention must be >= 0 for {k}")
|
||||
out[k] = n
|
||||
# Ensure all keys exist (default missing to 0)
|
||||
for k in ("daily", "weekly", "monthly", "yearly"):
|
||||
out.setdefault(k, 0)
|
||||
return out
|
||||
@@ -100,22 +115,18 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
print(json.dumps(to_json_safe(result), indent=2, sort_keys=False))
|
||||
return
|
||||
|
||||
# Minimal human output
|
||||
if result.get("ok") is True:
|
||||
print("OK")
|
||||
else:
|
||||
print("FAILED")
|
||||
|
||||
# Standard action list
|
||||
if "actions" in result:
|
||||
for a in result["actions"]:
|
||||
print(f"- {a}")
|
||||
|
||||
# Single action (e.g. init-host)
|
||||
if "action" in result:
|
||||
print(f"- {result['action']}")
|
||||
|
||||
# Doctor-style results list
|
||||
if "results" in result:
|
||||
for r in result["results"]:
|
||||
ok = r.get("ok", False)
|
||||
@@ -136,7 +147,6 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
line += f" {msg}"
|
||||
print(line)
|
||||
|
||||
# list-remotes style output
|
||||
if "hosts" in result:
|
||||
for h in result["hosts"]:
|
||||
print(h)
|
||||
@@ -147,6 +157,18 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
if "base" in result and result["base"]:
|
||||
print(f"- base {result['base']}")
|
||||
|
||||
if "snapshots" in result:
|
||||
for s in result["snapshots"]:
|
||||
kind = s.get("kind", "?")
|
||||
dirname = s.get("dirname", "?")
|
||||
status = s.get("status") or "unknown"
|
||||
started_at = s.get("started_at") or ""
|
||||
dur = s.get("duration_seconds")
|
||||
dur_s = f"{dur}s" if isinstance(dur, int) else ""
|
||||
extra = " ".join(x for x in [started_at, dur_s] if x)
|
||||
if extra:
|
||||
extra = " " + extra
|
||||
print(f"- {kind} {dirname} {status}{extra}")
|
||||
|
||||
|
||||
def cmd_install(args: argparse.Namespace) -> int:
|
||||
@@ -178,12 +200,16 @@ def cmd_init_host(args: argparse.Namespace) -> int:
|
||||
raise ConfigError("--address is required (or interactive input)")
|
||||
|
||||
if args.retention is None:
|
||||
# In phase 1 we require retention explicitly or via install default.
|
||||
# We'll read global.yaml if present to fetch retention_defaults.
|
||||
from .config.load import load_global_config
|
||||
|
||||
paths = PobsyncPaths(home=prefix)
|
||||
global_cfg = load_global_config(paths.global_config_path)
|
||||
retention = global_cfg.get("retention_defaults") or {"daily": 14, "weekly": 8, "monthly": 12, "yearly": 0}
|
||||
retention = global_cfg.get("retention_defaults") or {
|
||||
"daily": 14,
|
||||
"weekly": 8,
|
||||
"monthly": 12,
|
||||
"yearly": 0,
|
||||
}
|
||||
else:
|
||||
retention = parse_retention(args.retention)
|
||||
|
||||
@@ -220,7 +246,12 @@ def cmd_show_config(args: argparse.Namespace) -> int:
|
||||
|
||||
def cmd_doctor(args: argparse.Namespace) -> int:
|
||||
prefix = Path(args.prefix)
|
||||
result = run_doctor(prefix=prefix, host=args.host, connect=bool(args.connect), rsync_dry_run=bool(args.rsync_dry_run))
|
||||
result = run_doctor(
|
||||
prefix=prefix,
|
||||
host=args.host,
|
||||
connect=bool(args.connect),
|
||||
rsync_dry_run=bool(args.rsync_dry_run),
|
||||
)
|
||||
_print(result, as_json=bool(args.json))
|
||||
return 0 if result.get("ok") else 1
|
||||
|
||||
@@ -239,6 +270,43 @@ def cmd_run_scheduled(args: argparse.Namespace) -> int:
|
||||
return 0 if result.get("ok") else 2
|
||||
|
||||
|
||||
def cmd_snapshots_list(args: argparse.Namespace) -> int:
    """
    Handle `snapshots list`: query snapshots for a host and print them.

    Returns 0 when the result reports ok, else 1.
    """
    result = run_snapshots_list(
        prefix=Path(args.prefix),
        host=args.host,
        kind=args.kind,
        limit=int(args.limit),
        include_incomplete=bool(args.include_incomplete),
    )
    _print(result, as_json=bool(args.json))
    return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
def cmd_snapshots_show(args: argparse.Namespace) -> int:
    """
    Handle `snapshots show`: print one snapshot's metadata.

    JSON mode delegates to _print; human mode dumps the meta mapping as
    YAML followed by pointers into the rsync log (path and optional tail).
    Returns 0 when the result reports ok, else 1.
    """
    result = run_snapshots_show(
        prefix=Path(args.prefix),
        host=args.host,
        kind=args.kind,
        dirname=args.dirname,
        tail=args.tail,
    )

    if args.json:
        _print(result, as_json=True)
        return 0 if result.get("ok") else 1

    # Human output: YAML meta first, then log pointers.
    print(dump_yaml(result.get("meta", {})).rstrip())
    if result.get("log_path"):
        print(f"\n# rsync.log: {result['log_path']}")

    if result.get("log_tail"):
        print("\n# rsync.log (tail)")
        for entry in result["log_tail"]:
            print(entry)

    return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
|
||||
parser = build_parser()
|
||||
@@ -247,6 +315,7 @@ def main(argv: list[str] | None = None) -> int:
|
||||
try:
|
||||
handler = getattr(args, "_handler")
|
||||
return int(handler(args))
|
||||
|
||||
except PobsyncError as e:
|
||||
if args.json:
|
||||
_print({"ok": False, "error": str(e), "type": type(e).__name__}, as_json=True)
|
||||
|
||||
@@ -3,8 +3,6 @@ from __future__ import annotations
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
from ..config.load import load_global_config, load_host_config
|
||||
from ..config.merge import build_effective_config
|
||||
from ..errors import ConfigError
|
||||
@@ -20,7 +18,7 @@ from ..snapshot import (
|
||||
snapshot_dir_name,
|
||||
utc_now,
|
||||
)
|
||||
from ..util import ensure_dir, realpath_startswith, sanitize_host
|
||||
from ..util import ensure_dir, realpath_startswith, sanitize_host, write_yaml_atomic
|
||||
|
||||
|
||||
def _host_backup_dirs(backup_root: str, host: str) -> HostBackupDirs:
|
||||
@@ -56,8 +54,21 @@ def select_scheduled_base(dirs: HostBackupDirs) -> Path | None:
|
||||
return _find_latest_snapshot(dirs.manual)
|
||||
|
||||
|
||||
def write_meta(path: Path, data: dict[str, Any]) -> None:
|
||||
path.write_text(yaml.safe_dump(data, sort_keys=False), encoding="utf-8")
|
||||
def _base_meta_from_path(base_dir: Path | None) -> dict[str, Any] | None:
|
||||
if base_dir is None:
|
||||
return None
|
||||
|
||||
kind = base_dir.parent.name
|
||||
if kind not in ("scheduled", "manual"):
|
||||
# Should not happen with current selection logic, but keep meta robust.
|
||||
kind = "unknown"
|
||||
|
||||
return {
|
||||
"kind": kind,
|
||||
"dirname": base_dir.name,
|
||||
"id": None,
|
||||
"path": None,
|
||||
}
|
||||
|
||||
|
||||
def run_scheduled(prefix: Path, host: str, dry_run: bool) -> dict[str, Any]:
|
||||
@@ -158,31 +169,15 @@ def run_scheduled(prefix: Path, host: str, dry_run: bool) -> dict[str, Any]:
|
||||
incomplete_dir = dirs.incomplete / snap_name
|
||||
data_dir = incomplete_dir / "data"
|
||||
meta_dir = incomplete_dir / "meta"
|
||||
|
||||
|
||||
ensure_dir(data_dir)
|
||||
ensure_dir(meta_dir)
|
||||
|
||||
meta_path = meta_dir / "meta.yaml"
|
||||
log_path = meta_dir / "rsync.log"
|
||||
|
||||
meta: dict[str, Any] = {
|
||||
"id": snap_id,
|
||||
"host": host,
|
||||
"type": "scheduled",
|
||||
"label": None,
|
||||
"status": "running",
|
||||
"started_at": format_iso_z(ts),
|
||||
"ended_at": None,
|
||||
"base_snapshot": None,
|
||||
"rsync": {"exit_code": None, "stats": {}},
|
||||
"overrides": {"includes": [], "excludes": [], "base": None},
|
||||
}
|
||||
|
||||
log_path.touch(exist_ok=True)
|
||||
write_meta(meta_path, meta)
|
||||
|
||||
# Pre-build command so we can record it in metadata.
|
||||
dest = str(data_dir) + "/"
|
||||
|
||||
cmd = build_rsync_command(
|
||||
rsync_binary=str(rsync_binary),
|
||||
rsync_args=list(rsync_args),
|
||||
@@ -197,18 +192,38 @@ def run_scheduled(prefix: Path, host: str, dry_run: bool) -> dict[str, Any]:
|
||||
extra_includes=list(includes),
|
||||
)
|
||||
|
||||
meta: dict[str, Any] = {
|
||||
"schema_version": 1,
|
||||
"id": snap_id,
|
||||
"host": host,
|
||||
"type": "scheduled",
|
||||
"label": None,
|
||||
"status": "running",
|
||||
"started_at": format_iso_z(ts),
|
||||
"ended_at": None,
|
||||
"duration_seconds": None,
|
||||
"base": _base_meta_from_path(base_dir),
|
||||
"rsync": {"exit_code": None, "command": cmd, "stats": {}},
|
||||
# Keep existing fields for future expansion / compatibility with current structure.
|
||||
"overrides": {"includes": [], "excludes": [], "base": None},
|
||||
}
|
||||
|
||||
log_path.touch(exist_ok=True)
|
||||
write_yaml_atomic(meta_path, meta)
|
||||
|
||||
result = run_rsync(cmd, log_path=log_path, timeout_seconds=timeout_seconds)
|
||||
|
||||
end_ts = utc_now()
|
||||
meta["ended_at"] = format_iso_z(end_ts)
|
||||
meta["duration_seconds"] = int((end_ts - ts).total_seconds())
|
||||
meta["rsync"]["exit_code"] = result.exit_code
|
||||
meta["status"] = "success" if result.exit_code == 0 else "failed"
|
||||
write_meta(meta_path, meta)
|
||||
write_yaml_atomic(meta_path, meta)
|
||||
|
||||
if not log_path.exists():
|
||||
meta["status"] = "failed"
|
||||
meta["rsync"]["exit_code"] = 99
|
||||
write_meta(meta_path, meta)
|
||||
write_yaml_atomic(meta_path, meta)
|
||||
return {
|
||||
"ok": False,
|
||||
"dry_run": False,
|
||||
|
||||
108
src/pobsync/commands/snapshots_list.py
Normal file
108
src/pobsync/commands/snapshots_list.py
Normal file
@@ -0,0 +1,108 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Tuple
|
||||
|
||||
from ..config.load import load_global_config, load_host_config
|
||||
from ..config.merge import build_effective_config
|
||||
from ..errors import ConfigError
|
||||
from ..paths import PobsyncPaths
|
||||
from ..snapshot_meta import iter_snapshot_dirs, normalize_kind, read_snapshot_meta, resolve_host_root
|
||||
from ..util import sanitize_host
|
||||
|
||||
|
||||
def _parse_iso_z(s: Any) -> Optional[datetime]:
|
||||
"""
|
||||
Parse timestamps like '2026-02-02T22:38:07Z' into aware UTC datetime.
|
||||
Returns None if invalid.
|
||||
"""
|
||||
if not isinstance(s, str) or not s:
|
||||
return None
|
||||
# Strictly support trailing 'Z' (UTC) to avoid locale/timezone ambiguity.
|
||||
if not s.endswith("Z"):
|
||||
return None
|
||||
try:
|
||||
dt = datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ")
|
||||
return dt.replace(tzinfo=timezone.utc)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _sort_key(item: dict[str, Any]) -> Tuple[int, datetime, str]:
    """
    Build a sort key for a snapshot entry.

    Entries with a parseable started_at sort apart from those without
    (leading flag), then by started_at, then by dirname. The caller sorts
    with reverse=True, so dated entries come first, newest first.
    """
    dirname = item.get("dirname") or ""
    started = _parse_iso_z(item.get("started_at"))
    if started is None:
        # Epoch placeholder keeps the tuple comparable; the leading 0/1
        # flag already separates undated entries from dated ones.
        return (0, datetime.fromtimestamp(0, tz=timezone.utc), dirname)
    return (1, started, dirname)
|
||||
|
||||
|
||||
def run_snapshots_list(prefix: Path, host: str, kind: str, limit: int, include_incomplete: bool) -> dict[str, Any]:
    """
    Collect snapshot listings for one host.

    Scans the requested kind(s) under the host's backup root, reads each
    snapshot's meta.yaml (best effort; missing meta yields None fields),
    sorts newest-first across all kinds and truncates to `limit`.

    Raises ConfigError on an invalid limit or a bad effective config.
    """
    host = sanitize_host(host)
    k = normalize_kind(kind)

    if limit < 1:
        raise ConfigError("--limit must be >= 1")

    paths = PobsyncPaths(home=prefix)

    global_cfg = load_global_config(paths.global_config_path)
    host_cfg = load_host_config(paths.hosts_dir / f"{host}.yaml")
    cfg = build_effective_config(global_cfg, host_cfg)

    backup_root = cfg.get("backup_root")
    if not isinstance(backup_root, str) or not backup_root.startswith("/"):
        raise ConfigError("Invalid backup_root in effective config")

    host_root = resolve_host_root(backup_root, host)

    # Expand "all" into the concrete kinds to scan.
    if k != "all":
        kinds = [k]
    else:
        kinds = ["scheduled", "manual"] + (["incomplete"] if include_incomplete else [])

    items: list[dict[str, Any]] = []
    for current_kind in kinds:
        for snap_dir in iter_snapshot_dirs(host_root, current_kind):
            meta = read_snapshot_meta(snap_dir)
            items.append(
                {
                    "kind": current_kind,
                    "dirname": snap_dir.name,
                    "path": str(snap_dir),
                    "status": meta.get("status"),
                    "started_at": meta.get("started_at"),
                    "ended_at": meta.get("ended_at"),
                    "duration_seconds": meta.get("duration_seconds"),
                    "base": meta.get("base"),
                    "id": meta.get("id"),
                }
            )

    # Newest first across every scanned kind, then truncate to the limit.
    items.sort(key=_sort_key, reverse=True)

    return {
        "ok": True,
        "host": host,
        "kind": k,
        "include_incomplete": bool(include_incomplete),
        "limit": limit,
        "snapshots": items[:limit],
    }
|
||||
|
||||
81
src/pobsync/commands/snapshots_show.py
Normal file
81
src/pobsync/commands/snapshots_show.py
Normal file
@@ -0,0 +1,81 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, List
|
||||
|
||||
from ..config.load import load_global_config, load_host_config
|
||||
from ..config.merge import build_effective_config
|
||||
from ..errors import ConfigError
|
||||
from ..paths import PobsyncPaths
|
||||
from ..snapshot_meta import (
|
||||
build_snapshot_ref,
|
||||
normalize_kind,
|
||||
read_snapshot_meta,
|
||||
resolve_host_root,
|
||||
snapshot_log_path,
|
||||
)
|
||||
from ..util import sanitize_host
|
||||
|
||||
|
||||
def _tail_lines(path: Path, n: int) -> List[str]:
|
||||
"""
|
||||
Read last n lines of a text file.
|
||||
Simple and safe; rsync logs are not huge in normal cases.
|
||||
"""
|
||||
try:
|
||||
lines = path.read_text(encoding="utf-8", errors="replace").splitlines()
|
||||
return lines[-n:]
|
||||
except OSError:
|
||||
return []
|
||||
|
||||
|
||||
def run_snapshots_show(
    prefix: Path,
    host: str,
    kind: str,
    dirname: str,
    tail: int | None,
) -> dict[str, Any]:
    """
    Resolve one snapshot and return its metadata, plus an optional log tail.

    Raises ConfigError for an invalid kind ("all" is not allowed here),
    a non-positive --tail, a bad effective config, or a snapshot
    directory that does not exist.
    """
    host = sanitize_host(host)
    k = normalize_kind(kind)
    if k == "all":
        raise ConfigError("kind must be scheduled, manual, or incomplete for show")

    if tail is not None and tail < 1:
        raise ConfigError("--tail must be >= 1")

    paths = PobsyncPaths(home=prefix)

    global_cfg = load_global_config(paths.global_config_path)
    host_cfg = load_host_config(paths.hosts_dir / f"{host}.yaml")
    cfg = build_effective_config(global_cfg, host_cfg)

    backup_root = cfg.get("backup_root")
    if not isinstance(backup_root, str) or not backup_root.startswith("/"):
        raise ConfigError("Invalid backup_root in effective config")

    host_root = resolve_host_root(backup_root, host)

    ref = build_snapshot_ref(host=host, host_root=host_root, kind=k, dirname=dirname)
    if not ref.path.exists():
        raise ConfigError(f"Snapshot not found: {k}/{dirname}")

    meta = read_snapshot_meta(ref.path)
    log_path = snapshot_log_path(ref.path)
    log_exists = log_path.exists()

    # Only read the log when a tail was requested and the file is present.
    log_tail = _tail_lines(log_path, tail) if tail is not None and log_exists else None

    return {
        "ok": True,
        "host": host,
        "kind": k,
        "dirname": dirname,
        "path": str(ref.path),
        "meta_path": str(ref.path / "meta" / "meta.yaml"),
        "log_path": str(log_path) if log_exists else None,
        "meta": meta,
        "log_tail": log_tail,
    }
|
||||
|
||||
98
src/pobsync/snapshot_meta.py
Normal file
98
src/pobsync/snapshot_meta.py
Normal file
@@ -0,0 +1,98 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterable
|
||||
|
||||
from .errors import ConfigError
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class SnapshotRef:
    """Immutable reference to one snapshot directory on disk."""

    # Host name the snapshot belongs to.
    host: str
    kind: str  # scheduled | manual | incomplete
    # Snapshot directory name, e.g. '20260202-223807Z__K3VQEVH7'.
    dirname: str
    # Path to the snapshot directory (existence is not checked here).
    path: Path
|
||||
|
||||
|
||||
def snapshot_meta_path(snapshot_dir: Path) -> Path:
    """Return the path of meta/meta.yaml inside a snapshot directory."""
    return snapshot_dir.joinpath("meta", "meta.yaml")
|
||||
|
||||
|
||||
def snapshot_log_path(snapshot_dir: Path) -> Path:
    """Return the path of meta/rsync.log inside a snapshot directory."""
    return snapshot_dir.joinpath("meta", "rsync.log")
|
||||
|
||||
|
||||
def read_snapshot_meta(snapshot_dir: Path) -> dict[str, Any]:
    """
    Read meta/meta.yaml for a snapshot directory.

    Returns {} when the file is missing, unreadable, not valid YAML, or
    does not contain a mapping; callers decide how strict to be.
    """
    # Local import keeps module load light; PyYAML is a project dependency.
    import yaml  # type: ignore[import-not-found]

    p = snapshot_meta_path(snapshot_dir)
    if not p.exists():
        return {}

    try:
        data = yaml.safe_load(p.read_text(encoding="utf-8"))
    except (OSError, UnicodeDecodeError, yaml.YAMLError):
        # Missing/corrupt/undecodable meta must not crash listing.
        # Previously this caught bare Exception, which would also have
        # masked programming errors; the tuple above covers exactly the
        # expected failure modes (I/O, decoding, YAML parsing).
        return {}

    # None (empty file) and non-mapping YAML are treated as "no metadata".
    return data if isinstance(data, dict) else {}
|
||||
|
||||
|
||||
def iter_snapshot_dirs(host_root: Path, kind: str) -> Iterable[Path]:
    """
    Yield snapshot directories for a given host root and kind.
    kind: scheduled|manual|incomplete
    """
    subdir_by_kind = {
        "scheduled": "scheduled",
        "manual": "manual",
        "incomplete": ".incomplete",
    }
    if kind not in subdir_by_kind:
        raise ValueError(f"Invalid kind: {kind!r}")

    parent = host_root / subdir_by_kind[kind]
    if not parent.exists():
        return []

    # Snapshot dirs are named <ts>__<id>, so lexicographic order equals
    # chronological order; reverse puts newest first.
    return sorted((entry for entry in parent.iterdir() if entry.is_dir()), reverse=True)
|
||||
|
||||
|
||||
def build_snapshot_ref(host: str, host_root: Path, kind: str, dirname: str) -> SnapshotRef:
    """
    Construct a SnapshotRef for (kind, dirname) under host_root.

    Raises ValueError when kind is not scheduled/manual/incomplete.
    The referenced path is not checked for existence.
    """
    subdir_by_kind = {
        "scheduled": "scheduled",
        "manual": "manual",
        "incomplete": ".incomplete",
    }
    if kind not in subdir_by_kind:
        raise ValueError(f"Invalid kind: {kind!r}")

    snapshot_path = host_root / subdir_by_kind[kind] / dirname
    return SnapshotRef(host=host, kind=kind, dirname=dirname, path=snapshot_path)
|
||||
|
||||
|
||||
def resolve_host_root(backup_root: str, host: str) -> Path:
    """Return the per-host snapshot root; ConfigError unless backup_root is absolute."""
    if backup_root.startswith("/"):
        return Path(backup_root) / host
    raise ConfigError("backup_root must be an absolute path")
|
||||
|
||||
|
||||
def normalize_kind(kind: str) -> str:
    """Trim and lower-case a kind argument; ConfigError unless it is a known kind."""
    candidate = kind.strip().lower()
    if candidate not in ("scheduled", "manual", "incomplete", "all"):
        raise ConfigError("kind must be one of: scheduled, manual, incomplete, all")
    return candidate
|
||||
|
||||
@@ -2,6 +2,7 @@ from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
@@ -25,7 +26,6 @@ def sanitize_host(host: str) -> str:
|
||||
return host
|
||||
|
||||
|
||||
|
||||
def ensure_dir(path: Path, mode: int = 0o750) -> None:
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
try:
|
||||
@@ -62,3 +62,60 @@ def to_json_safe(obj: Any) -> Any:
|
||||
return obj
|
||||
return str(obj)
|
||||
|
||||
|
||||
def write_yaml_atomic(path: Path, data: Any) -> None:
    """
    Write YAML to `path` atomically.

    Strategy:
    - Write to a temp file in the same directory
    - fsync temp file
    - os.replace(temp, path) (atomic on POSIX)
    - fsync directory entry (best-effort, POSIX only)

    This helps avoid partial/corrupt meta files on crashes.
    """
    # Local import to keep module load light; PyYAML is already a project dependency.
    import yaml  # type: ignore[import-not-found]

    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)

    tmp_path: Path | None = None

    try:
        with tempfile.NamedTemporaryFile(
            mode="w",
            encoding="utf-8",
            dir=str(parent),
            prefix=path.name + ".",
            suffix=".tmp",
            delete=False,
        ) as tf:
            tmp_path = Path(tf.name)
            tf.write(yaml.safe_dump(data, sort_keys=False))
            tf.flush()
            os.fsync(tf.fileno())

        os.replace(str(tmp_path), str(path))

        # Best-effort directory fsync (helps durability across power loss on some FS).
        # os.O_DIRECTORY is POSIX-only: look it up with getattr so platforms
        # without it (e.g. Windows) skip the sync instead of raising
        # AttributeError, which the except OSError below would not catch.
        o_directory = getattr(os, "O_DIRECTORY", None)
        if o_directory is not None:
            try:
                dir_fd = os.open(str(parent), o_directory)
                try:
                    os.fsync(dir_fd)
                finally:
                    os.close(dir_fd)
            except OSError:
                pass

    finally:
        # If anything failed before replace(), try to clean up temp file
        if tmp_path is not None and tmp_path.exists():
            try:
                tmp_path.unlink()
            except OSError:
                pass
|
||||
|
||||
|
||||
Reference in New Issue
Block a user