|
- #!.venv/bin/python
-
- # Scan filesystem to generate a list of files to back up, based on a
- # configuration file. Pass this list to borg to actually create the
- # backup. Execute a notification script on the remote server to
- # report the backup status.
-
- import os
- import re
- import sys
- import json
- import stat
- import time
- import select
- import pathlib
- import threading
- import subprocess
-
- import typing
-
- import yaml
- import wcmatch.glob # type: ignore
- import humanfriendly # type: ignore
-
def pstr(path: bytes) -> str:
    """Render a raw filesystem path as printable text.

    Bytes that are not valid UTF-8 are shown as ``\\xNN`` escapes
    instead of raising, so log messages never fail on odd filenames.
    """
    return str(path, 'utf-8', 'backslashreplace')
-
def format_size(n: int) -> str:
    """Format a byte count for humans using binary (KiB/MiB) units.

    ``keep_width`` keeps trailing zeros so columns of sizes line up.
    """
    return humanfriendly.format_size(n, binary=True, keep_width=True)
-
class Config:
    """Backup configuration parsed from a YAML file.

    Holds the scan roots plus the exclude / force-include glob
    patterns, which are pre-compiled to regex pairs with wcmatch.
    """

    roots: list[bytes]                   # scan roots, sorted shortest-first
    max_file_size: typing.Optional[int]  # skip larger regular files; None = no limit
    one_file_system: bool                # don't cross mount points while scanning
    exclude_caches: bool                 # honor CACHEDIR.TAG marker files
    exclude: list[bytes]                 # glob patterns for paths to skip
    force_include: list[bytes]           # patterns that override 'exclude'
    notify_email: typing.Optional[str]   # failure notification address, or None

    def __init__(self, configfile: str):
        """Load *configfile* (YAML) and compile its match patterns."""
        # Read config
        with open(configfile, 'r') as f:
            config = yaml.safe_load(f)
        self.one_file_system = config.get('one-file-system', False)
        self.exclude_caches = config.get('exclude-caches', False)

        # Accept human-friendly sizes like "500 MiB".
        if 'max-file-size' in config:
            self.max_file_size = humanfriendly.parse_size(
                config['max-file-size'])
        else:
            self.max_file_size = None

        # 'roots' is a newline-separated scalar; paths are handled as
        # bytes throughout so undecodable filenames still work.
        raw = config.get('roots', '').encode().split(b'\n')
        self.roots = []
        for x in raw:
            if not len(x):
                continue
            self.roots.append(x)
        # Shortest-first, so parent roots are scanned before children.
        self.roots.sort(key=len)

        def process_match_list(config_name):
            # Split a newline-separated option into glob patterns.
            raw = config.get(config_name, '').encode().split(b'\n')
            pats = []
            # Prepend '**/' to any relative patterns
            for x in raw:
                if not len(x):
                    continue
                if x.startswith(b'/'):
                    pats.append(x)
                else:
                    pats.append(b'**/' + x)
            return pats

        self.exclude = process_match_list('exclude')
        self.force_include = process_match_list('force-include')

        self.notify_email = config.get('notify-email', None)

        # Compile patterns
        flags = (wcmatch.glob.GLOBSTAR |
                 wcmatch.glob.DOTGLOB |
                 wcmatch.glob.NODOTDIR |
                 wcmatch.glob.EXTGLOB |
                 wcmatch.glob.BRACE)

        # Path matches if it matches at least one regex in "a" and no
        # regex in "b"
        (a, b) = wcmatch.glob.translate(self.exclude, flags=flags)
        self.exclude_re = ([ re.compile(x) for x in a ],
                           [ re.compile(x) for x in b ])

        (a, b) = wcmatch.glob.translate(self.force_include, flags=flags)
        self.force_include_re = ([ re.compile(x) for x in a ],
                                 [ re.compile(x) for x in b ])

    def match_re(self, re: tuple[list[typing.Pattern],
                                 list[typing.Pattern]], path: bytes) -> bool:
        """Return True if *path* matches pattern pair *re*.

        NOTE(review): the parameter name 're' shadows the re module
        inside this method — harmless here since the module isn't
        used, but worth renaming eventually.
        """
        # Path matches if it matches at least one regex in
        # re[0] and no regex in re[1].
        for a in re[0]:
            if a.match(path):
                for b in re[1]:
                    if b.match(path):
                        return False
                return True
        return False
-
class Backup:
    """Walk the configured roots and emit the list of paths to back up.

    Paths are written to an output stream (NUL-delimited for borg's
    --paths-from-stdin, newline-delimited in dry-run mode).  Log
    messages go to stderr and are retained for the notification email.
    """

    def __init__(self, config: "Config", dry_run: bool):
        self.config = config
        self.dry_run = dry_run
        # Roots already processed, so duplicates are scanned only once.
        self.root_seen: dict[bytes, bool] = {}

        # Saved log messages as (letter, message) pairs.
        self.logs: list[tuple[str, str]] = []

        # Set by run(); stream that out() writes paths to.
        self.outfile: typing.Optional[typing.IO[bytes]] = None

    def out(self, path: bytes):
        """Emit one selected path: '\\n'-terminated for humans, NUL for borg."""
        self.outfile.write(path + (b'\n' if self.dry_run else b'\0'))

    def log(self, letter: str, msg: str, bold: bool=False):
        """Write a colored log line to stderr and remember it.

        *letter* is 'E' (error, red), 'W' (warning, yellow) or
        'I' (info, cyan); any other letter is uncolored.
        """
        colors = { 'E': 31, 'W': 33, 'I': 36 }
        # .get() avoids the double lookup of "colors[letter] if letter in colors".
        c = colors.get(letter, 0)
        b = "" if bold else "\033[22m"
        sys.stderr.write(f"\033[1;{c}m{letter}:{b} {msg}\033[0m\n")
        self.logs.append((letter, msg))

    def run(self, outfile: typing.IO[bytes]):
        """Scan every configured root, writing selected paths to *outfile*.

        Roots that are missing, not directories, or already seen are
        logged and skipped rather than aborting the whole backup.
        """
        self.outfile = outfile
        for root in self.config.roots:
            if root in self.root_seen:
                self.log('I', f"ignoring root, already seen: {pstr(root)}")
                continue

            try:
                st = os.lstat(root)
                if not stat.S_ISDIR(st.st_mode):
                    raise NotADirectoryError
            except FileNotFoundError:
                self.log('E', f"root does not exist: {pstr(root)}")
                continue
            except NotADirectoryError:
                self.log('E', f"root is not a directory: {pstr(root)}")
                continue

            self.log('I', f"processing root {pstr(root)}")
            self.scan(root)

    def scan(self, path: bytes,
             parent_st: typing.Optional[os.stat_result] = None):
        """If the given path should be backed up, print it. If it's
        a directory and its contents should be included, recurse.

        *parent_st* is the stat result of the containing directory,
        used for the one-file-system mount-point check.
        """
        try:
            st = os.lstat(path)
            is_dir = stat.S_ISDIR(st.st_mode)
            is_reg = stat.S_ISREG(st.st_mode)
            # On-disk usage, not apparent size (sparse files stay small).
            size = st.st_blocks * 512

            # Decorated path ends with a '/' if it's a directory.
            decorated_path = path
            if is_dir and not decorated_path.endswith(b'/'):
                decorated_path += b'/'

            # See if there's a reason to exclude it
            exclude_reason = None

            if self.config.match_re(self.config.exclude_re, decorated_path):
                # Config file says to exclude
                exclude_reason = ('I', "skipping, excluded by config file")

            elif (self.config.one_file_system
                  and parent_st is not None
                  and is_dir
                  and st.st_dev != parent_st.st_dev):
                # Crosses a mount point
                exclude_reason = ('I', "skipping, on different filesystem")

            elif (is_reg
                  and self.config.max_file_size
                  and size > self.config.max_file_size):
                # Too big
                a = format_size(size)
                b = format_size(self.config.max_file_size)
                exclude_reason = ('W', f"file size {a} exceeds limit {b}")

            # If we have a reason to exclude it, stop now unless it's
            # force-included
            force = self.config.match_re(self.config.force_include_re,
                                         decorated_path)
            if exclude_reason and not force:
                self.log(exclude_reason[0],
                         f"{exclude_reason[1]}: {pstr(path)}")
                return

            # Print path for Borg
            self.out(path)

            # Process directories
            if is_dir:

                # Mark the root as seen under both spellings, since
                # config roots may or may not carry a trailing '/'.
                if path in self.config.roots:
                    self.root_seen[path] = True
                if decorated_path in self.config.roots:
                    self.root_seen[decorated_path] = True

                # Skip if it contains CACHEDIR.TAG
                # (mirroring the --exclude-caches borg option)
                if self.config.exclude_caches:
                    try:
                        tag = b'Signature: 8a477f597d28d172789f06886806bc55'
                        with open(path + b'/CACHEDIR.TAG', 'rb') as f:
                            if f.read(len(tag)) == tag:
                                self.log(
                                    'I', f"skipping, cache dir: {pstr(path)}")
                                return
                    except OSError:
                        # Best-effort probe: a missing/unreadable tag file
                        # just means the directory is not a cache dir.
                        # (Was a bare 'except:', which also swallowed
                        # KeyboardInterrupt/SystemExit.)
                        pass

                # Recurse
                with os.scandir(path) as it:
                    for entry in it:
                        self.scan(path=entry.path, parent_st=st)

        except (FileNotFoundError,
                IsADirectoryError,
                NotADirectoryError,
                PermissionError) as e:
            self.log('E', f"can't read {pstr(path)}: {str(e)}")
            return
-
def main(argv: list[str]):
    """Scan the filesystem, feed the file list to borg, report status.

    Returns the process exit code: 0 on success, 1 if any errors were
    logged (borg failures, unreadable roots, notification failures).
    """
    import argparse

    # Parse args
    parser = argparse.ArgumentParser(
        prog=argv[0],
        description="Back up the local system using borg",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    base = pathlib.Path(__file__).parent
    parser.add_argument('-c', '--config',
                        help="Config file", default=str(base / "config.yaml"))
    parser.add_argument('-v', '--vars',
                        help="Variables file", default=str(base / "vars.sh"))
    parser.add_argument('-n', '--dry-run', action="store_true",
                        help="Just print log output, don't run borg")
    parser.add_argument('-d', '--debug', action="store_true",
                        help="Print filenames for --dry-run")

    args = parser.parse_args()
    config = Config(args.config)
    backup = Backup(config, args.dry_run)

    # Parse variables from vars.sh; fall back to the defaults below if
    # the file is missing or malformed.
    hostname = os.uname().nodename
    borg_sh = str(base / "borg.sh")
    notify_sh = str(base / "notify.sh")
    try:
        with open(args.vars) as f:
            for line in f:
                m = re.match(r"\s*export\s*([A-Z_]+)=(.*)", line)
                if not m:
                    continue
                var = m.group(1)
                value = m.group(2)
                if var == "HOSTNAME":
                    hostname = value
                elif var == "BORG":
                    borg_sh = value
                elif var == "BORG_DIR":
                    notify_sh = str(pathlib.Path(value) / "notify.sh")
    except Exception as e:
        backup.log('W', f"failed to parse variables from {args.vars}: {str(e)}")

    # Run backup
    captured_output: list[bytes] = []

    if args.dry_run:
        if args.debug:
            backup.run(sys.stdout.buffer)
        else:
            with open(os.devnull, "wb") as out:
                backup.run(out)
        sys.stdout.flush()
    else:
        # Stream the NUL-delimited path list straight into borg's stdin.
        borg = subprocess.Popen([borg_sh,
                                 "create",
                                 "--verbose",
                                 "--progress",
                                 "--log-json",
                                 "--list",
                                 "--filter", "E",
                                 "--stats",
                                 "--checkpoint-interval", "900",
                                 "--compression", "zstd,3",
                                 "--paths-from-stdin",
                                 "--paths-delimiter", "\\0",
                                 "::" + hostname + "-{now:%Y%m%d-%H%M%S}"],
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        if borg.stdin is None:
            raise Exception("no pipe")

        borg_saw_warnings = 0
        borg_saw_errors = 0

        # Use a thread to capture and reformat borg's JSON log output.
        def reader_thread(fh):
            # BUG FIX: without 'nonlocal', the '+=' below made these
            # names local and raised UnboundLocalError on the first
            # warning/error (silently swallowed by the except handler),
            # so warnings/errors were never counted.
            nonlocal borg_saw_warnings, borg_saw_errors
            last_progress = 0
            for line in fh:
                try:
                    data = json.loads(line)
                    if ((data['type'] == 'log_message' or
                         data['type'] == 'progress_message')
                            and 'message' in data):

                        # Count warnings and errors, but ignore some.
                        changed_msg = "file changed while we backed it up"
                        if data['levelname'] == 'WARNING':
                            prefix = "warning: "
                            if changed_msg not in data['message']:
                                borg_saw_warnings += 1
                        elif data['levelname'] not in ('DEBUG', 'INFO'):
                            prefix = "error: "
                            borg_saw_errors += 1
                        else:
                            prefix = ""

                        line = (prefix + data['message'] + '\n').encode()
                    elif data['type'] == 'archive_progress':
                        # Rate-limit progress reports to one per 10 s.
                        now = time.time()
                        if now - last_progress > 10:
                            last_progress = now
                            def size(short: str, full: str) -> str:
                                return f" {short}={format_size(data[full])}"
                            line = (f"progress:" +
                                    f" files={data['nfiles']}" +
                                    size('orig', 'original_size') +
                                    size('comp', 'compressed_size') +
                                    size('dedup', 'deduplicated_size') +
                                    f" path={data['path']}" +
                                    "\n").encode()
                        else:
                            continue
                    else:
                        # ignore unknown progress line
                        continue
                except Exception:
                    # on error, fall through and print the raw line
                    pass
                sys.stdout.buffer.write(line)
                sys.stdout.flush()
                captured_output.append(line)
            fh.close()
        reader = threading.Thread(target=reader_thread, args=(borg.stdout,))
        reader.daemon = True
        reader.start()

        try:
            # Give borg some time to start, just to clean up stdout
            time.sleep(1)
            backup.run(borg.stdin)
        except BrokenPipeError:
            sys.stderr.write("broken pipe\n")
        finally:
            try:
                borg.stdin.close()
            except BrokenPipeError:
                pass
        borg.wait()
        reader.join()
        ret = borg.returncode
        if ret < 0:
            backup.log('E', f"borg exited with signal {-ret}")
        elif ret == 2 or borg_saw_errors:
            backup.log('E', f"borg exited with errors (ret={ret})")
        elif ret == 1 and borg_saw_warnings:
            backup.log('W', f"borg exited with warnings (ret={ret})")
        elif ret != 0:
            backup.log('E', f"borg exited with unknown error code {ret}")

    # See if we had any errors
    warnings = sum(1 for (letter, msg) in backup.logs if letter == 'W')
    errors = sum(1 for (letter, msg) in backup.logs if letter == 'E')

    def plural(num: int, word: str) -> str:
        suffix = "" if num == 1 else "s"
        return f"{num} {word}{suffix}"

    warnmsg = plural(warnings, "warning") if warnings else None
    errmsg = plural(errors, "error") if errors else None

    if not warnings and not errors:
        backup.log('I', "backup successful", bold=True)

    else:
        if warnmsg:
            backup.log('W', f"reported {warnmsg}", bold=True)
        if errmsg:
            backup.log('E', f"reported {errmsg}", bold=True)

        # Send a notification of errors
        email = backup.config.notify_email
        if email and not args.dry_run:
            backup.log('I', f"sending error notification to {email}")

            # Show all of our warnings and errors. Use a ">" prefix
            # so warnings and errors get highlighted by the mail reader.
            body = [ "Logs from backup.py:" ]
            for (letter, msg) in backup.logs:
                if letter == "E" or letter == "W":
                    prefix = ">"
                else:
                    prefix = " "
                body.append(f"{prefix}{letter}: {msg}")
            body_text = "\n".join(body).encode()

            # Followed by borg output
            body_text += b"\n\nBorg output:\n" + b"".join(captured_output)

            # Subject summary
            if errmsg and warnmsg:
                summary = f"{errmsg}, {warnmsg}"
            elif errmsg:
                summary = errmsg
            else:
                summary = warnmsg or ""

            # Call notify.sh
            res = subprocess.run([notify_sh, summary, email], input=body_text)
            if res.returncode != 0:
                backup.log('E', "failed to send notification")
                errors += 1

    # Exit with an error code if we had any errors
    if errors:
        return 1
    return 0
-
if __name__ == "__main__":
    # sys is already imported at module scope; the local re-import was
    # redundant and has been dropped.
    raise SystemExit(main(sys.argv))
|