feat(schedule): add scheduling commands & clarify create vs update in output
This commit is contained in:
@@ -12,12 +12,15 @@ from .commands.list_remotes import run_list_remotes
|
||||
from .commands.retention_apply import run_retention_apply
|
||||
from .commands.retention_plan import run_retention_plan
|
||||
from .commands.run_scheduled import run_scheduled
|
||||
from .commands.schedule_create import run_schedule_create
|
||||
from .commands.schedule_list import run_schedule_list
|
||||
from .commands.schedule_remove import run_schedule_remove
|
||||
from .commands.show_config import dump_yaml, run_show_config
|
||||
from .commands.snapshots_list import run_snapshots_list
|
||||
from .commands.snapshots_show import run_snapshots_show
|
||||
from .errors import ConfigError, LockError, PobsyncError
|
||||
from .paths import PobsyncPaths
|
||||
from .util import is_tty, to_json_safe
|
||||
from .errors import LockError, PobsyncError
|
||||
from .schedule import CRON_FILE_DEFAULT
|
||||
from .util import to_json_safe
|
||||
|
||||
|
||||
def build_parser() -> argparse.ArgumentParser:
|
||||
@@ -70,17 +73,8 @@ def build_parser() -> argparse.ArgumentParser:
|
||||
rp.add_argument("host", help="Host to back up")
|
||||
rp.add_argument("--dry-run", action="store_true", help="Run rsync --dry-run without creating directories")
|
||||
rp.add_argument("--prune", action="store_true", help="Apply retention after a successful run (default: false)")
|
||||
rp.add_argument(
|
||||
"--prune-max-delete",
|
||||
type=int,
|
||||
default=10,
|
||||
help="Refuse to prune more than N snapshots (default: 10; set 0 to block)",
|
||||
)
|
||||
rp.add_argument(
|
||||
"--prune-protect-bases",
|
||||
action="store_true",
|
||||
help="When pruning, also keep base snapshots referenced in meta (default: false)",
|
||||
)
|
||||
rp.add_argument("--prune-max-delete", type=int, default=10, help="Refuse to prune more than N snapshots (default: 10)")
|
||||
rp.add_argument("--prune-protect-bases", action="store_true", help="When pruning, also keep base snapshots referenced in meta")
|
||||
rp.set_defaults(_handler=cmd_run_scheduled)
|
||||
|
||||
# snapshots
|
||||
@@ -91,17 +85,13 @@ def build_parser() -> argparse.ArgumentParser:
|
||||
sn_list.add_argument("host", help="Host name")
|
||||
sn_list.add_argument("--kind", default="all", help="scheduled|manual|incomplete|all (default: all)")
|
||||
sn_list.add_argument("--limit", type=int, default=20, help="Max results (default: 20)")
|
||||
sn_list.add_argument(
|
||||
"--include-incomplete",
|
||||
action="store_true",
|
||||
help="Include .incomplete when --kind=all (default: false)",
|
||||
)
|
||||
sn_list.add_argument("--include-incomplete", action="store_true", help="Include .incomplete when --kind=all")
|
||||
sn_list.set_defaults(_handler=cmd_snapshots_list)
|
||||
|
||||
sn_show = sn_sub.add_parser("show", help="Show snapshot metadata")
|
||||
sn_show.add_argument("host", help="Host name")
|
||||
sn_show.add_argument("--kind", required=True, help="scheduled|manual|incomplete")
|
||||
sn_show.add_argument("dirname", help="Snapshot directory name (e.g. 20260202-223807Z__K3VQEVH7)")
|
||||
sn_show.add_argument("dirname", help="Snapshot directory name")
|
||||
sn_show.add_argument("--tail", type=int, default=None, help="Show last N lines of rsync.log")
|
||||
sn_show.set_defaults(_handler=cmd_snapshots_show)
|
||||
|
||||
@@ -112,20 +102,78 @@ def build_parser() -> argparse.ArgumentParser:
|
||||
rt_plan = rt_sub.add_parser("plan", help="Show retention prune plan (dry-run)")
|
||||
rt_plan.add_argument("host", help="Host name")
|
||||
rt_plan.add_argument("--kind", default="scheduled", help="scheduled|manual|all (default: scheduled)")
|
||||
rt_plan.add_argument("--protect-bases", action="store_true", help="Also keep base snapshots referenced in meta")
|
||||
rt_plan.add_argument("--protect-bases", action="store_true", help="Also keep base snapshots referenced in meta (default: false)")
|
||||
rt_plan.set_defaults(_handler=cmd_retention_plan)
|
||||
|
||||
rt_apply = rt_sub.add_parser("apply", help="Apply retention plan (DESTRUCTIVE)")
|
||||
rt_apply.add_argument("host", help="Host name")
|
||||
rt_apply.add_argument("--kind", default="scheduled", help="scheduled|manual|all (default: scheduled)")
|
||||
rt_apply.add_argument("--protect-bases", action="store_true", help="Also keep base snapshots referenced in meta")
|
||||
rt_apply.add_argument("--protect-bases", action="store_true", help="Also keep base snapshots referenced in meta (default: false)")
|
||||
rt_apply.add_argument("--max-delete", type=int, default=10, help="Refuse to delete more than N snapshots (default: 10)")
|
||||
rt_apply.add_argument("--yes", action="store_true", help="Confirm deletion")
|
||||
rt_apply.set_defaults(_handler=cmd_retention_apply)
|
||||
|
||||
# schedule
|
||||
sch = sub.add_parser("schedule", help="Manage cron schedules in /etc/cron.d/pobsync")
|
||||
sch_sub = sch.add_subparsers(dest="schedule_cmd", required=True)
|
||||
|
||||
sch_create = sch_sub.add_parser("create", help="Create or update a schedule for a host")
|
||||
sch_create.add_argument("host", help="Host name")
|
||||
|
||||
mode = sch_create.add_mutually_exclusive_group(required=True)
|
||||
mode.add_argument("--cron", default=None, help='Raw cron expression (5 fields), e.g. "15 2 * * *"')
|
||||
mode.add_argument("--daily", default=None, help="Daily at HH:MM")
|
||||
mode.add_argument("--hourly", type=int, default=None, help="Hourly at minute N (0..59)")
|
||||
mode.add_argument("--weekly", action="store_true", help="Weekly schedule (requires --dow and --time)")
|
||||
mode.add_argument("--monthly", action="store_true", help="Monthly schedule (requires --day and --time)")
|
||||
|
||||
sch_create.add_argument("--dow", default=None, help="For --weekly: mon,tue,wed,thu,fri,sat,sun")
|
||||
sch_create.add_argument("--day", type=int, default=None, help="For --monthly: day of month (1..31)")
|
||||
sch_create.add_argument("--time", default=None, help="For --weekly/--monthly: HH:MM")
|
||||
|
||||
sch_create.add_argument("--user", default="root", help="Cron user field (default: root)")
|
||||
sch_create.add_argument("--cron-file", default=CRON_FILE_DEFAULT, help="Cron file path (default: /etc/cron.d/pobsync)")
|
||||
|
||||
sch_create.add_argument("--prune", action="store_true", help="Run retention prune after successful backup")
|
||||
sch_create.add_argument("--prune-max-delete", type=int, default=10, help="Prune guardrail (default: 10)")
|
||||
sch_create.add_argument("--prune-protect-bases", action="store_true", help="Prune with base protection (default: false)")
|
||||
sch_create.add_argument("--dry-run", action="store_true", help="Show actions, do not write")
|
||||
sch_create.set_defaults(_handler=cmd_schedule_create)
|
||||
|
||||
sch_list = sch_sub.add_parser("list", help="List schedules from /etc/cron.d/pobsync")
|
||||
sch_list.add_argument("--host", default=None, help="Filter by host")
|
||||
sch_list.add_argument("--cron-file", default=CRON_FILE_DEFAULT, help="Cron file path (default: /etc/cron.d/pobsync)")
|
||||
sch_list.set_defaults(_handler=cmd_schedule_list)
|
||||
|
||||
sch_remove = sch_sub.add_parser("remove", help="Remove schedule block for a host")
|
||||
sch_remove.add_argument("host", help="Host name")
|
||||
sch_remove.add_argument("--cron-file", default=CRON_FILE_DEFAULT, help="Cron file path (default: /etc/cron.d/pobsync)")
|
||||
sch_remove.add_argument("--dry-run", action="store_true", help="Show actions, do not write")
|
||||
sch_remove.set_defaults(_handler=cmd_schedule_remove)
|
||||
|
||||
return p
|
||||
|
||||
|
||||
def parse_retention(s: str) -> dict[str, int]:
    """Parse a "daily=14,weekly=8" style retention spec into a dict.

    Unknown keys and negative counts raise ValueError. Keys not present in
    the spec are filled in with 0 after the explicit ones.
    """
    valid_keys = ("daily", "weekly", "monthly", "yearly")
    out: dict[str, int] = {}

    for component in (piece.strip() for piece in s.split(",")):
        if not component:
            continue
        if "=" not in component:
            raise ValueError(f"Invalid retention component: {component!r}")
        key, _, raw_value = component.partition("=")
        key = key.strip()
        if key not in valid_keys:
            raise ValueError(f"Invalid retention key: {key!r}")
        count = int(raw_value.strip())
        if count < 0:
            raise ValueError(f"Retention must be >= 0 for {key}")
        out[key] = count

    # Missing keys default to 0, preserving the order of explicit entries.
    for key in valid_keys:
        out.setdefault(key, 0)
    return out
|
||||
|
||||
|
||||
def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
if as_json:
|
||||
print(json.dumps(to_json_safe(result), indent=2, sort_keys=False))
|
||||
@@ -140,8 +188,20 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
for a in result["actions"]:
|
||||
print(f"- {a}")
|
||||
|
||||
if "action" in result:
|
||||
print(f"- {result['action']}")
|
||||
if "results" in result:
|
||||
for r in result["results"]:
|
||||
label = "OK" if r.get("ok") else "FAIL"
|
||||
name = r.get("check", "check")
|
||||
msg = r.get("message") or r.get("error") or ""
|
||||
extra = ""
|
||||
if "path" in r:
|
||||
extra = f" ({r['path']})"
|
||||
elif "host" in r:
|
||||
extra = f" ({r['host']})"
|
||||
line = f"- {label} {name}{extra}"
|
||||
if msg:
|
||||
line += f" {msg}"
|
||||
print(line)
|
||||
|
||||
if "hosts" in result:
|
||||
for h in result["hosts"]:
|
||||
@@ -166,7 +226,6 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
extra = " " + extra
|
||||
print(f"- {kind} {dirname} {status}{extra}")
|
||||
|
||||
# retention plan
|
||||
if "keep" in result and "delete" in result:
|
||||
keep = result.get("keep") or []
|
||||
delete = result.get("delete") or []
|
||||
@@ -199,19 +258,33 @@ def _print(result: dict[str, Any], as_json: bool) -> None:
|
||||
extra = " " + extra
|
||||
print(f" - {dirname}{extra}")
|
||||
|
||||
if "schedules" in result:
|
||||
for s in result["schedules"]:
|
||||
host = s.get("host", "?")
|
||||
cron = s.get("cron") or "unknown"
|
||||
user = s.get("user") or "unknown"
|
||||
|
||||
prune = bool(s.get("prune", False))
|
||||
prune_max = s.get("prune_max_delete", None)
|
||||
protect = bool(s.get("prune_protect_bases", False))
|
||||
|
||||
extra = ""
|
||||
if prune:
|
||||
extra = " prune"
|
||||
if isinstance(prune_max, int):
|
||||
extra += f" max_delete={prune_max}"
|
||||
if protect:
|
||||
extra += " protect_bases"
|
||||
|
||||
print(f"- {host} {cron} {user}{extra}")
|
||||
|
||||
|
||||
def cmd_install(args: argparse.Namespace) -> int:
|
||||
prefix = Path(args.prefix)
|
||||
backup_root = args.backup_root
|
||||
if backup_root is None and is_tty():
|
||||
backup_root = input("backup_root (absolute path, not '/'): ").strip() or None
|
||||
|
||||
from .cli import parse_retention # keep behavior consistent if you still use it elsewhere
|
||||
retention = parse_retention(args.retention)
|
||||
|
||||
result = run_install(
|
||||
prefix=prefix,
|
||||
backup_root=backup_root,
|
||||
backup_root=args.backup_root,
|
||||
retention=retention,
|
||||
dry_run=bool(args.dry_run),
|
||||
force=bool(args.force),
|
||||
@@ -220,60 +293,17 @@ def cmd_install(args: argparse.Namespace) -> int:
|
||||
return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
def parse_retention(s: str) -> dict[str, int]:
    """Parse a "key=N,key=N" retention spec into a dict with all four keys.

    NOTE(review): this duplicates the parse_retention defined earlier in
    this module; at import time the later definition wins. Consider
    removing one copy.
    """
    out: dict[str, int] = {}
    # Split on commas, dropping empty components (tolerates trailing commas).
    parts = [p.strip() for p in s.split(",") if p.strip()]
    for part in parts:
        if "=" not in part:
            raise ValueError(f"Invalid retention component: {part!r}")
        k, v = part.split("=", 1)
        k = k.strip()
        v = v.strip()
        if k not in {"daily", "weekly", "monthly", "yearly"}:
            raise ValueError(f"Invalid retention key: {k!r}")
        n = int(v)
        if n < 0:
            raise ValueError(f"Retention must be >= 0 for {k}")
        out[k] = n
    # Any key the user did not mention defaults to 0.
    for k in ("daily", "weekly", "monthly", "yearly"):
        out.setdefault(k, 0)
    return out
|
||||
|
||||
|
||||
def cmd_init_host(args: argparse.Namespace) -> int:
|
||||
prefix = Path(args.prefix)
|
||||
|
||||
address = args.address
|
||||
if address is None and is_tty():
|
||||
address = input("address (hostname or ip): ").strip() or None
|
||||
if not address:
|
||||
raise ConfigError("--address is required (or interactive input)")
|
||||
|
||||
if args.retention is None:
|
||||
from .config.load import load_global_config
|
||||
|
||||
paths = PobsyncPaths(home=prefix)
|
||||
global_cfg = load_global_config(paths.global_config_path)
|
||||
retention = global_cfg.get("retention_defaults") or {
|
||||
"daily": 14,
|
||||
"weekly": 8,
|
||||
"monthly": 12,
|
||||
"yearly": 0,
|
||||
}
|
||||
else:
|
||||
retention = parse_retention(args.retention)
|
||||
|
||||
excludes_replace = args.exclude_replace if args.exclude_replace is not None else None
|
||||
|
||||
result = run_init_host(
|
||||
prefix=prefix,
|
||||
host=args.host,
|
||||
address=address,
|
||||
retention=retention,
|
||||
address=args.address,
|
||||
retention=args.retention,
|
||||
ssh_user=args.ssh_user,
|
||||
ssh_port=args.ssh_port,
|
||||
excludes_add=list(args.exclude_add),
|
||||
excludes_replace=excludes_replace,
|
||||
excludes_replace=args.exclude_replace,
|
||||
includes=list(args.include),
|
||||
dry_run=bool(args.dry_run),
|
||||
force=bool(args.force),
|
||||
@@ -284,12 +314,7 @@ def cmd_init_host(args: argparse.Namespace) -> int:
|
||||
|
||||
def cmd_doctor(args: argparse.Namespace) -> int:
|
||||
prefix = Path(args.prefix)
|
||||
result = run_doctor(
|
||||
prefix=prefix,
|
||||
host=args.host,
|
||||
connect=bool(args.connect),
|
||||
rsync_dry_run=bool(args.rsync_dry_run),
|
||||
)
|
||||
result = run_doctor(prefix=prefix, host=args.host, connect=bool(args.connect), rsync_dry_run=bool(args.rsync_dry_run))
|
||||
_print(result, as_json=bool(args.json))
|
||||
return 0 if result.get("ok") else 1
|
||||
|
||||
@@ -304,12 +329,10 @@ def cmd_list_remotes(args: argparse.Namespace) -> int:
|
||||
def cmd_show_config(args: argparse.Namespace) -> int:
    """Handle `show-config`: print host/global config as JSON or YAML.

    With --json the whole result dict is printed; otherwise only the
    "config" mapping is dumped as YAML for human consumption.
    """
    prefix = Path(args.prefix)
    result = run_show_config(prefix=prefix, host=args.host, effective=bool(args.effective))

    if args.json:
        _print(result, as_json=True)
    else:
        # Human-readable default: YAML of just the config mapping.
        print(dump_yaml(result["config"]).rstrip())

    return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
@@ -342,14 +365,7 @@ def cmd_snapshots_list(args: argparse.Namespace) -> int:
|
||||
|
||||
def cmd_snapshots_show(args: argparse.Namespace) -> int:
|
||||
prefix = Path(args.prefix)
|
||||
result = run_snapshots_show(
|
||||
prefix=prefix,
|
||||
host=args.host,
|
||||
kind=args.kind,
|
||||
dirname=args.dirname,
|
||||
tail=args.tail,
|
||||
)
|
||||
|
||||
result = run_snapshots_show(prefix=prefix, host=args.host, kind=args.kind, dirname=args.dirname, tail=args.tail)
|
||||
if args.json:
|
||||
_print(result, as_json=True)
|
||||
else:
|
||||
@@ -360,7 +376,6 @@ def cmd_snapshots_show(args: argparse.Namespace) -> int:
|
||||
print("\n# rsync.log (tail)")
|
||||
for line in result["log_tail"]:
|
||||
print(line)
|
||||
|
||||
return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
@@ -385,6 +400,42 @@ def cmd_retention_apply(args: argparse.Namespace) -> int:
|
||||
return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
def cmd_schedule_create(args: argparse.Namespace) -> int:
    """Handle `schedule create`: create or update a host's cron schedule.

    All scheduling-mode and prune options are forwarded verbatim to
    run_schedule_create; returns 0 on success, 1 otherwise.
    """
    prefix = Path(args.prefix)
    result = run_schedule_create(
        host=args.host,
        prefix=prefix,
        cron_file=Path(args.cron_file),
        cron_expr=args.cron,
        daily=args.daily,
        hourly=args.hourly,
        weekly=bool(args.weekly),
        dow=args.dow,
        time=args.time,
        monthly=bool(args.monthly),
        day=args.day,
        user=args.user,
        prune=bool(args.prune),
        prune_max_delete=int(args.prune_max_delete),
        prune_protect_bases=bool(args.prune_protect_bases),
        dry_run=bool(args.dry_run),
    )
    _print(result, as_json=bool(args.json))
    return 0 if result.get("ok") else 1
|
||||
|
||||
|
||||
def cmd_schedule_list(args: argparse.Namespace) -> int:
    """Handle `schedule list`: show pobsync-managed cron entries."""
    listing = run_schedule_list(cron_file=Path(args.cron_file), host=args.host)
    _print(listing, as_json=bool(args.json))
    return 0 if listing.get("ok") else 1
|
||||
|
||||
|
||||
def cmd_schedule_remove(args: argparse.Namespace) -> int:
    """Handle `schedule remove`: drop the managed cron block for a host."""
    outcome = run_schedule_remove(
        host=args.host,
        cron_file=Path(args.cron_file),
        dry_run=bool(args.dry_run),
    )
    _print(outcome, as_json=bool(args.json))
    return 0 if outcome.get("ok") else 1
|
||||
|
||||
|
||||
def main(argv: list[str] | None = None) -> int:
|
||||
parser = build_parser()
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
162
src/pobsync/commands/schedule_create.py
Normal file
162
src/pobsync/commands/schedule_create.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional
|
||||
|
||||
from ..errors import ConfigError
|
||||
from ..schedule import (
|
||||
build_cron_expr_daily,
|
||||
build_cron_expr_hourly,
|
||||
build_cron_expr_monthly,
|
||||
build_cron_expr_weekly,
|
||||
normalize_cron_expr,
|
||||
render_host_block,
|
||||
upsert_host_block,
|
||||
validate_cron_expr,
|
||||
)
|
||||
from ..util import ensure_dir, sanitize_host, write_text_atomic
|
||||
|
||||
|
||||
def _choose_cron_expr(
    *,
    cron_expr: Optional[str],
    daily: Optional[str],
    hourly: Optional[int],
    weekly: bool,
    dow: Optional[str],
    time: Optional[str],
    monthly: bool,
    day: Optional[int],
) -> str:
    """Resolve exactly one scheduling mode into a 5-field cron expression.

    Raises ConfigError when zero or multiple modes are selected, or when
    --weekly/--monthly is missing its companion options.
    """
    selected = sum(
        (
            cron_expr is not None,
            daily is not None,
            hourly is not None,
            bool(weekly),
            bool(monthly),
        )
    )
    if selected == 0:
        raise ConfigError("One of --cron/--daily/--hourly/--weekly/--monthly must be provided")
    if selected > 1:
        raise ConfigError("Choose exactly one of --cron/--daily/--hourly/--weekly/--monthly")

    if cron_expr is not None:
        validate_cron_expr(cron_expr)
        return normalize_cron_expr(cron_expr)

    if daily is not None:
        return build_cron_expr_daily(daily)

    if hourly is not None:
        return build_cron_expr_hourly(hourly)

    if weekly:
        if dow is None or time is None:
            raise ConfigError("--weekly requires --dow and --time")
        return build_cron_expr_weekly(dow, time)

    # Only --monthly can remain selected at this point.
    if day is None or time is None:
        raise ConfigError("--monthly requires --day and --time")
    return build_cron_expr_monthly(day, time)
|
||||
|
||||
|
||||
def run_schedule_create(
    *,
    host: str,
    prefix: Path,
    cron_file: Path,
    cron_expr: Optional[str],
    daily: Optional[str],
    hourly: Optional[int],
    weekly: bool,
    dow: Optional[str],
    time: Optional[str],
    monthly: bool,
    day: Optional[int],
    user: str,
    prune: bool,
    prune_max_delete: int,
    prune_protect_bases: bool,
    dry_run: bool,
) -> dict[str, Any]:
    """Create or update the managed cron block for *host* in *cron_file*.

    Exactly one scheduling mode (cron/daily/hourly/weekly/monthly) must be
    selected. Returns a JSON-safe dict with "ok", "actions", "host" and
    "cron_file"; the first action reports whether the schedule was
    created or updated.

    Raises:
        ConfigError: on invalid options or permission/OS errors while
            reading or writing the cron file.
    """
    host = sanitize_host(host)

    if prune_max_delete < 0:
        raise ConfigError("--prune-max-delete must be >= 0")

    # Translate the chosen mode into a 5-field cron expression.
    expr = _choose_cron_expr(
        cron_expr=cron_expr,
        daily=daily,
        hourly=hourly,
        weekly=weekly,
        dow=dow,
        time=time,
        monthly=monthly,
        day=day,
    )

    # Command cron will run. Prune flags are embedded in the string so
    # `schedule list` can later recover them by parsing the command.
    cmd = f"{prefix}/bin/pobsync --prefix {prefix} run-scheduled {host}"
    if prune:
        cmd += " --prune"
        cmd += f" --prune-max-delete {int(prune_max_delete)}"
        if prune_protect_bases:
            cmd += " --prune-protect-bases"

    log_dir = Path("/var/log/pobsync")
    log_path = str(log_dir / f"{host}.cron.log")

    block = render_host_block(
        host=host,
        cron_expr=expr,
        user=user,
        command=cmd,
        log_path=log_path,
        include_env=True,
    )

    # A missing cron file is fine (first schedule ever); other read
    # failures are surfaced as ConfigError.
    try:
        existing = cron_file.read_text(encoding="utf-8")
    except FileNotFoundError:
        existing = ""
    except PermissionError as e:
        raise ConfigError(f"Permission denied reading {cron_file}: {e}") from e
    except OSError as e:
        raise ConfigError(f"Failed reading {cron_file}: {e}") from e

    # Detect create-vs-update before upserting so the report is accurate.
    had_block = f"# BEGIN POBSYNC host={host}" in existing
    new_content = upsert_host_block(existing, host, block)

    action_word = "updated" if had_block else "created"
    actions = [
        f"schedule {action_word} host={host}",
        f"file {cron_file}",
        f"cron {expr}",
        f"user {user}",
    ]

    if prune:
        actions.append(f"prune enabled (max_delete={int(prune_max_delete)})")
        if prune_protect_bases:
            actions.append("prune protect_bases enabled")

    # Dry-run returns before any filesystem mutation.
    if dry_run:
        actions.append("dry-run (no file written)")
        return {"ok": True, "actions": actions, "host": host, "cron_file": str(cron_file)}

    # Best-effort ensure log dir exists
    try:
        ensure_dir(log_dir)
    except Exception:
        actions.append(f"warn: could not create {log_dir}")

    try:
        write_text_atomic(cron_file, new_content)
    except PermissionError as e:
        raise ConfigError(f"Permission denied writing {cron_file}: {e}") from e
    except OSError as e:
        raise ConfigError(f"Failed writing {cron_file}: {e}") from e

    return {"ok": True, "actions": actions, "host": host, "cron_file": str(cron_file)}
|
||||
|
||||
87
src/pobsync/commands/schedule_list.py
Normal file
87
src/pobsync/commands/schedule_list.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from ..errors import ConfigError
|
||||
from ..schedule import parse_cron_file
|
||||
from ..util import sanitize_host
|
||||
|
||||
|
||||
def _parse_prune_flags(command: Optional[str]) -> Dict[str, Any]:
|
||||
"""
|
||||
Best-effort parse of flags from the command string that we generate.
|
||||
"""
|
||||
if not command:
|
||||
return {"prune": False, "prune_max_delete": None, "prune_protect_bases": False}
|
||||
|
||||
tokens = command.split()
|
||||
prune = "--prune" in tokens
|
||||
protect = "--prune-protect-bases" in tokens
|
||||
|
||||
max_delete = None
|
||||
if "--prune-max-delete" in tokens:
|
||||
try:
|
||||
idx = tokens.index("--prune-max-delete")
|
||||
if idx + 1 < len(tokens):
|
||||
max_delete = int(tokens[idx + 1])
|
||||
except (ValueError, IndexError):
|
||||
max_delete = None
|
||||
|
||||
return {
|
||||
"prune": bool(prune),
|
||||
"prune_max_delete": max_delete,
|
||||
"prune_protect_bases": bool(protect),
|
||||
}
|
||||
|
||||
|
||||
def run_schedule_list(*, cron_file: Path, host: Optional[str]) -> dict[str, Any]:
    """List pobsync-managed schedule blocks found in *cron_file*.

    When *host* is given, only that host's block (if any) is returned;
    otherwise all blocks are returned sorted by host name. A missing cron
    file yields an empty schedule list.

    Raises:
        ConfigError: on permission/OS errors while reading the cron file.
    """
    if host is not None:
        host = sanitize_host(host)

    # A missing file simply means no schedules exist yet.
    try:
        content = cron_file.read_text(encoding="utf-8")
    except FileNotFoundError:
        content = ""
    except PermissionError as e:
        raise ConfigError(f"Permission denied reading {cron_file}: {e}") from e
    except OSError as e:
        raise ConfigError(f"Failed reading {cron_file}: {e}") from e

    blocks = parse_cron_file(content)

    def _entry(b) -> Dict[str, Any]:
        # One JSON-safe record per block, with best-effort prune flags
        # recovered from the command string.
        return {
            "host": b.host,
            "cron": b.cron_expr,
            "user": b.user,
            "command": b.command,
            "log_path": b.log_path,
            **_parse_prune_flags(b.command),
        }

    # Previously the record construction was duplicated verbatim in the
    # filtered and unfiltered branches; build both from the same helper.
    schedules: List[Dict[str, Any]] = []
    if host is not None:
        b = blocks.get(host)
        if b is not None:
            schedules.append(_entry(b))
    else:
        schedules = [_entry(blocks[h]) for h in sorted(blocks)]

    return {"ok": True, "cron_file": str(cron_file), "schedules": schedules}
|
||||
|
||||
39
src/pobsync/commands/schedule_remove.py
Normal file
39
src/pobsync/commands/schedule_remove.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from ..errors import ConfigError
|
||||
from ..schedule import remove_host_block
|
||||
from ..util import sanitize_host, write_text_atomic
|
||||
|
||||
|
||||
def run_schedule_remove(*, host: str, cron_file: Path, dry_run: bool) -> dict[str, Any]:
    """Remove the managed cron block for *host* from *cron_file*.

    Removal is idempotent: a missing file or absent block is not an error.
    Returns a JSON-safe dict with "ok", "actions", "host" and "cron_file".

    Raises:
        ConfigError: on permission/OS errors while reading or writing.
    """
    host = sanitize_host(host)

    # A missing cron file means there is nothing to remove.
    try:
        existing = cron_file.read_text(encoding="utf-8")
    except FileNotFoundError:
        existing = ""
    except PermissionError as e:
        raise ConfigError(f"Permission denied reading {cron_file}: {e}") from e
    except OSError as e:
        raise ConfigError(f"Failed reading {cron_file}: {e}") from e

    new_content = remove_host_block(existing, host)

    # NOTE(review): the remove action is reported even when no block
    # existed for the host — confirm whether a "not found" report is
    # preferable before changing.
    actions = [f"schedule remove host={host}", f"file {cron_file}"]

    # Dry-run returns before any filesystem mutation.
    if dry_run:
        actions.append("dry-run (no file written)")
        return {"ok": True, "actions": actions, "host": host, "cron_file": str(cron_file)}

    try:
        write_text_atomic(cron_file, new_content)
    except PermissionError as e:
        raise ConfigError(f"Permission denied writing {cron_file}: {e}") from e
    except OSError as e:
        raise ConfigError(f"Failed writing {cron_file}: {e}") from e

    return {"ok": True, "actions": actions, "host": host, "cron_file": str(cron_file)}
|
||||
|
||||
235
src/pobsync/schedule.py
Normal file
235
src/pobsync/schedule.py
Normal file
@@ -0,0 +1,235 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
|
||||
CRON_FILE_DEFAULT = "/etc/cron.d/pobsync"
|
||||
BEGIN_PREFIX = "# BEGIN POBSYNC host="
|
||||
END_PREFIX = "# END POBSYNC host="
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ScheduleBlock:
    """Parsed representation of one pobsync-managed block in a cron file.

    The cron fields are None when the block contained no recognizable
    cron entry line.
    """

    host: str
    raw_lines: List[str]  # full block including begin/end markers
    cron_expr: Optional[str]  # "m h dom mon dow"
    user: Optional[str]  # cron user field
    command: Optional[str]  # command portion, redirect stripped
    log_path: Optional[str]  # target of the >>path redirect, if present
|
||||
|
||||
|
||||
def normalize_cron_expr(expr: str) -> str:
    """Collapse whitespace runs in *expr* to single spaces and trim the ends."""
    fields = expr.split()
    return " ".join(fields)
|
||||
|
||||
|
||||
def validate_cron_expr(expr: str) -> None:
    """Raise ValueError unless *expr* has exactly 5 whitespace-separated fields."""
    field_count = len(expr.split())
    if field_count != 5:
        raise ValueError("cron expression must have exactly 5 fields (m h dom mon dow)")
|
||||
|
||||
|
||||
def parse_hhmm(s: str) -> Tuple[int, int]:
    """Parse "HH:MM" into an (hour, minute) pair; raise ValueError on bad input."""
    text = s.strip()
    hour_part, sep, minute_part = text.partition(":")
    if not sep or not hour_part.isdigit() or not minute_part.isdigit():
        raise ValueError("time must be HH:MM")
    hour = int(hour_part)
    minute = int(minute_part)
    if not 0 <= hour <= 23:
        raise ValueError("hour must be 0..23")
    if not 0 <= minute <= 59:
        raise ValueError("minute must be 0..59")
    return hour, minute
|
||||
|
||||
|
||||
def parse_dow(s: str) -> int:
    """Map a day-of-week name (mon..sun, case-insensitive) to cron numbering.

    Cron's convention: 0=sun, 1=mon, ... 6=sat.
    """
    names = ("sun", "mon", "tue", "wed", "thu", "fri", "sat")
    key = s.strip().lower()
    try:
        return names.index(key)
    except ValueError:
        raise ValueError("dow must be one of: mon,tue,wed,thu,fri,sat,sun") from None
|
||||
|
||||
|
||||
def build_cron_expr_daily(hhmm: str) -> str:
    """Return a cron expression that fires every day at the given HH:MM."""
    hour, minute = parse_hhmm(hhmm)
    return f"{minute} {hour} * * *"
|
||||
|
||||
|
||||
def build_cron_expr_hourly(minute: int = 0) -> str:
    """Return a cron expression that fires every hour at *minute* (0..59)."""
    if not 0 <= minute <= 59:
        raise ValueError("minute must be 0..59")
    return f"{minute} * * * *"
|
||||
|
||||
|
||||
def build_cron_expr_weekly(dow: str, hhmm: str) -> str:
    """Return a cron expression firing weekly on *dow* at the given HH:MM."""
    hour, minute = parse_hhmm(hhmm)
    dow_number = parse_dow(dow)
    return f"{minute} {hour} * * {dow_number}"
|
||||
|
||||
|
||||
def build_cron_expr_monthly(day: int, hhmm: str) -> str:
    """Return a cron expression firing monthly on day *day* (1..31) at HH:MM."""
    if not 1 <= day <= 31:
        raise ValueError("day must be 1..31")
    hour, minute = parse_hhmm(hhmm)
    return f"{minute} {hour} {day} * *"
|
||||
|
||||
|
||||
def render_host_block(
    host: str,
    cron_expr: str,
    user: str,
    command: str,
    log_path: Optional[str],
    include_env: bool = True,
) -> str:
    """Render one managed cron-file block for *host*, markers included.

    The returned text ends with a newline. When *log_path* is given the
    command's output is appended to that file; when *include_env* is true
    SHELL and PATH lines are emitted before the cron entry.
    """
    validate_cron_expr(cron_expr)
    cron_expr = normalize_cron_expr(cron_expr)

    redirect = f" >>{log_path} 2>&1" if log_path else ""
    env_lines = (
        [
            "SHELL=/bin/sh",
            "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
        ]
        if include_env
        else []
    )

    body = [
        f"{BEGIN_PREFIX}{host}",
        "# managed-by=pobsync",
        *env_lines,
        f"{cron_expr} {user} {command}{redirect}",
        f"{END_PREFIX}{host}",
    ]
    return "\n".join(body) + "\n"
|
||||
|
||||
|
||||
def parse_cron_file(content: str) -> Dict[str, ScheduleBlock]:
    """Parse *content* into a mapping of host -> ScheduleBlock.

    Only lines between matching "# BEGIN POBSYNC host=..." and
    "# END POBSYNC host=..." markers are considered; all other file content
    is ignored. A later block for the same host overwrites an earlier one.
    """
    blocks: Dict[str, ScheduleBlock] = {}
    lines = content.splitlines()

    i = 0
    while i < len(lines):
        line = lines[i]
        if line.startswith(BEGIN_PREFIX):
            host = line[len(BEGIN_PREFIX) :].strip()
            block_lines = [line]
            i += 1
            # Collect up to and including the matching END marker. If the
            # marker is missing, the rest of the file becomes the block.
            while i < len(lines):
                block_lines.append(lines[i])
                if lines[i].strip() == f"{END_PREFIX}{host}":
                    break
                i += 1

            cron_expr, user, command, log_path = _extract_cron_line(block_lines)
            blocks[host] = ScheduleBlock(
                host=host,
                raw_lines=block_lines,
                cron_expr=cron_expr,
                user=user,
                command=command,
                log_path=log_path,
            )
        # Advance past the current line (the END marker when a block was
        # just consumed).
        i += 1

    return blocks
|
||||
|
||||
|
||||
def _extract_cron_line(block_lines: List[str]) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str]]:
|
||||
for raw in block_lines:
|
||||
line = raw.strip()
|
||||
if not line:
|
||||
continue
|
||||
if line.startswith("#"):
|
||||
continue
|
||||
# skip env-like lines
|
||||
if "=" in line and line.split("=", 1)[0].isidentifier():
|
||||
continue
|
||||
|
||||
parts = line.split()
|
||||
if len(parts) < 7:
|
||||
continue
|
||||
|
||||
cron_expr = " ".join(parts[0:5])
|
||||
user = parts[5]
|
||||
cmd = " ".join(parts[6:])
|
||||
|
||||
log_path = None
|
||||
if ">>" in cmd:
|
||||
before, after = cmd.split(">>", 1)
|
||||
cmd = before.rstrip()
|
||||
after_parts = after.strip().split()
|
||||
if after_parts:
|
||||
log_path = after_parts[0]
|
||||
|
||||
return cron_expr, user, cmd, log_path
|
||||
|
||||
return None, None, None, None
|
||||
|
||||
|
||||
def upsert_host_block(content: str, host: str, new_block: str) -> str:
    """Replace the managed block for *host* in *content*, or append it.

    An existing block (BEGIN..END markers inclusive) is swapped in place;
    when none exists the new block is appended after a separating blank
    line. The result always ends with exactly one trailing newline.
    """
    lines = content.splitlines()
    out: List[str] = []
    i = 0
    replaced = False

    begin = f"{BEGIN_PREFIX}{host}"
    end = f"{END_PREFIX}{host}"

    while i < len(lines):
        if lines[i].strip() == begin:
            replaced = True
            # skip until end marker (inclusive)
            i += 1
            while i < len(lines) and lines[i].strip() != end:
                i += 1
            if i < len(lines):
                i += 1  # skip end marker
            # Splice in the replacement where the old block stood.
            out.extend(new_block.rstrip("\n").splitlines())
            continue

        out.append(lines[i])
        i += 1

    if not replaced:
        # Separate the appended block from existing content with a blank line.
        if out and out[-1].strip() != "":
            out.append("")
        out.extend(new_block.rstrip("\n").splitlines())

    return "\n".join(out).rstrip() + "\n"
|
||||
|
||||
|
||||
def remove_host_block(content: str, host: str) -> str:
    """Return *content* with the managed block for *host* removed.

    The block's BEGIN..END markers (inclusive) are dropped; content without
    a matching block is returned unchanged apart from trailing-newline
    normalization.
    """
    lines = content.splitlines()
    out: List[str] = []
    i = 0

    begin = f"{BEGIN_PREFIX}{host}"
    end = f"{END_PREFIX}{host}"

    while i < len(lines):
        if lines[i].strip() == begin:
            # Skip everything up to and including the end marker; if the
            # marker is missing, the rest of the file is dropped with it.
            i += 1
            while i < len(lines) and lines[i].strip() != end:
                i += 1
            if i < len(lines):
                i += 1  # skip end marker
            continue

        out.append(lines[i])
        i += 1

    return "\n".join(out).rstrip() + "\n"
|
||||
|
||||
@@ -68,10 +68,10 @@ def write_yaml_atomic(path: Path, data: Any) -> None:
|
||||
Write YAML to `path` atomically.
|
||||
|
||||
Strategy:
|
||||
- Write to a temp file in the same directory
|
||||
- fsync temp file
|
||||
- os.replace(temp, path) (atomic on POSIX)
|
||||
- fsync directory entry (best-effort)
|
||||
- Write to a temp file in the same directory
|
||||
- fsync temp file
|
||||
- os.replace(temp, path) (atomic on POSIX)
|
||||
- fsync directory entry (best-effort)
|
||||
|
||||
This helps avoid partial/corrupt meta files on crashes.
|
||||
"""
|
||||
@@ -119,3 +119,57 @@ def write_yaml_atomic(path: Path, data: Any) -> None:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def write_text_atomic(path: Path, content: str) -> None:
    """
    Atomically replace *path* with *content* (UTF-8).

    A temp file in the same directory is written and fsync'd, then swapped
    in via os.replace (atomic on POSIX). The directory entry is fsync'd
    best-effort so the rename survives power loss on some filesystems.
    """
    directory = path.parent
    directory.mkdir(parents=True, exist_ok=True)

    tmp: Path | None = None
    try:
        fd, tmp_name = tempfile.mkstemp(
            dir=str(directory),
            prefix=path.name + ".",
            suffix=".tmp",
        )
        tmp = Path(tmp_name)
        with os.fdopen(fd, "w", encoding="utf-8") as handle:
            handle.write(content)
            handle.flush()
            os.fsync(handle.fileno())

        os.replace(str(tmp), str(path))

        # Best-effort directory fsync (helps durability across power loss on some FS)
        try:
            dir_fd = os.open(str(directory), os.O_DIRECTORY)
            try:
                os.fsync(dir_fd)
            finally:
                os.close(dir_fd)
        except OSError:
            pass

    finally:
        # If anything failed before replace(), try to clean up temp file
        if tmp is not None and tmp.exists():
            try:
                tmp.unlink()
            except OSError:
                pass
|
||||
|
||||
|
||||
Reference in New Issue
Block a user