From 3ecd0e731e342aeb4d39be0801e47349e72692fa Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Wed, 20 Mar 2024 11:58:59 -0700 Subject: [PATCH 1/5] Initial work on spot check schema and preparatory refactoring (#656). --- NEWS | 1 + borgmatic/actions/check.py | 339 +++++++++++- borgmatic/borg/check.py | 437 +++------------- borgmatic/config/generate.py | 19 +- borgmatic/config/schema.yaml | 124 +++-- tests/unit/actions/test_check.py | 506 +++++++++++++++++- tests/unit/borg/test_check.py | 793 ++++------------------------- tests/unit/config/test_generate.py | 53 +- 8 files changed, 1168 insertions(+), 1104 deletions(-) diff --git a/NEWS b/NEWS index 4edc861..9152b4b 100644 --- a/NEWS +++ b/NEWS @@ -5,6 +5,7 @@ * Add documentation about backing up containerized databases by configuring borgmatic to exec into a container to run a dump command: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#containers + * 1.8.9 * #311: Add custom dump/restore command options for MySQL and MariaDB. diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index 79e2023..7598d6d 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -1,12 +1,292 @@ +import datetime +import hashlib +import itertools import logging +import pathlib +import os +import borgmatic.borg.extract import borgmatic.borg.check +import borgmatic.borg.state import borgmatic.config.validate import borgmatic.hooks.command + +DEFAULT_CHECKS = ( + {'name': 'repository', 'frequency': '1 month'}, + {'name': 'archives', 'frequency': '1 month'}, +) + + logger = logging.getLogger(__name__) +def parse_checks(config, only_checks=None): + ''' + Given a configuration dict with a "checks" sequence of dicts and an optional list of override + checks, return a tuple of named checks to run. + + For example, given a config of: + + {'checks': ({'name': 'repository'}, {'name': 'archives'})} + + This will be returned as: + + ('repository', 'archives') + + If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value + has a name of "disabled", return an empty tuple, meaning that no checks should be run. + ''' + checks = only_checks or tuple( + check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS) + ) + checks = tuple(check.lower() for check in checks) + + if 'disabled' in checks: + logger.warning( + 'The "disabled" value for the "checks" option is deprecated and will be removed from a future release; use "skip_actions" instead' + ) + if len(checks) > 1: + logger.warning( + 'Multiple checks are configured, but one of them is "disabled"; not running any checks' + ) + return () + + return checks + + +def parse_frequency(frequency): + ''' + Given a frequency string with a number and a unit of time, return a corresponding + datetime.timedelta instance or None if the frequency is None or "always". + + For instance, given "3 weeks", return datetime.timedelta(weeks=3) + + Raise ValueError if the given frequency cannot be parsed. 
+ ''' + if not frequency: + return None + + frequency = frequency.strip().lower() + + if frequency == 'always': + return None + + try: + number, time_unit = frequency.split(' ') + number = int(number) + except ValueError: + raise ValueError(f"Could not parse consistency check frequency '{frequency}'") + + if not time_unit.endswith('s'): + time_unit += 's' + + if time_unit == 'months': + number *= 30 + time_unit = 'days' + elif time_unit == 'years': + number *= 365 + time_unit = 'days' + + try: + return datetime.timedelta(**{time_unit: number}) + except TypeError: + raise ValueError(f"Could not parse consistency check frequency '{frequency}'") + + +def filter_checks_on_frequency( + config, + borg_repository_id, + checks, + force, + archives_check_id=None, +): + ''' + Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence + of checks, whether to force checks to run, and an ID for the archives check potentially being + run (if any), filter down those checks based on the configured "frequency" for each check as + compared to its check time file. + + In other words, a check whose check time file's timestamp is too new (based on the configured + frequency) will get cut from the returned sequence of checks. Example: + + config = { + 'checks': [ + { + 'name': 'archives', + 'frequency': '2 weeks', + }, + ] + } + + When this function is called with that config and "archives" in checks, "archives" will get + filtered out of the returned result if its check time file is newer than 2 weeks old, indicating + that it's not yet time to run that check again. + + Raise ValueError if a frequency cannot be parsed. + ''' + if not checks: + return checks + + filtered_checks = list(checks) + + if force: + return tuple(filtered_checks) + + for check_config in config.get('checks', DEFAULT_CHECKS): + check = check_config['name'] + if checks and check not in checks: + continue + + frequency_delta = parse_frequency(check_config.get('frequency')) + if not frequency_delta: + continue + + check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id) + if not check_time: + continue + + # If we've not yet reached the time when the frequency dictates we're ready for another + # check, skip this check. + if datetime.datetime.now() < check_time + frequency_delta: + remaining = check_time + frequency_delta - datetime.datetime.now() + logger.info( + f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)' + ) + filtered_checks.remove(check) + + return tuple(filtered_checks) + + +def make_archives_check_id(archive_filter_flags): + ''' + Given a sequence of flags to filter archives, return a unique hash corresponding to those + particular flags. If there are no flags, return None. + ''' + if not archive_filter_flags: + return None + + return hashlib.sha256(' '.join(archive_filter_flags).encode()).hexdigest() + + +def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None): + ''' + Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a path for recording + that check's time (the time of that check last occurring). 
+ ''' + borgmatic_source_directory = os.path.expanduser( + config.get('borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) + ) + + if check_type in ('archives', 'data'): + return os.path.join( + borgmatic_source_directory, + 'checks', + borg_repository_id, + check_type, + archives_check_id if archives_check_id else 'all', + ) + + return os.path.join( + borgmatic_source_directory, + 'checks', + borg_repository_id, + check_type, + ) + + +def write_check_time(path): # pragma: no cover + ''' + Record a check time of now as the modification time of the given path. + ''' + logger.debug(f'Writing check time at {path}') + + os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True) + pathlib.Path(path, mode=0o600).touch() + + +def read_check_time(path): + ''' + Return the check time based on the modification time of the given path. Return None if the path + doesn't exist. + ''' + logger.debug(f'Reading check time from {path}') + + try: + return datetime.datetime.fromtimestamp(os.stat(path).st_mtime) + except FileNotFoundError: + return None + + +def probe_for_check_time(config, borg_repository_id, check, archives_check_id): + ''' + Given a configuration dict, a Borg repository ID, the name of a check type ("repository", + "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding + check time or None if such a check time does not exist. + + When the check type is "archives" or "data", this function probes two different paths to find + the check time, e.g.: + + ~/.borgmatic/checks/1234567890/archives/9876543210 + ~/.borgmatic/checks/1234567890/archives/all + + ... and returns the maximum modification time of the files found (if any). The first path + represents a more specific archives check time (a check on a subset of archives), and the second + is a fallback to the last "all" archives check. + + For other check types, this function reads from a single check time path, e.g.: + + ~/.borgmatic/checks/1234567890/repository + ''' + check_times = ( + read_check_time(group[0]) + for group in itertools.groupby( + ( + make_check_time_path(config, borg_repository_id, check, archives_check_id), + make_check_time_path(config, borg_repository_id, check), + ) + ) + ) + + try: + return max(check_time for check_time in check_times if check_time) + except ValueError: + return None + + +def upgrade_check_times(config, borg_repository_id): + ''' + Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on + disk from old-style paths to new-style paths. + + Currently, the only upgrade performed is renaming an archive or data check path that looks like: + + ~/.borgmatic/checks/1234567890/archives + + to: + + ~/.borgmatic/checks/1234567890/archives/all + ''' + for check_type in ('archives', 'data'): + new_path = make_check_time_path(config, borg_repository_id, check_type, 'all') + old_path = os.path.dirname(new_path) + temporary_path = f'{old_path}.temp' + + if not os.path.isfile(old_path) and not os.path.isfile(temporary_path): + continue + + logger.debug(f'Upgrading archives check time from {old_path} to {new_path}') + + try: + os.rename(old_path, temporary_path) + except FileNotFoundError: + pass + + os.mkdir(old_path) + os.rename(temporary_path, new_path) + + def run_check( config_filename, repository, @@ -20,6 +300,8 @@ def run_check( ): ''' Run the "check" action for the given repository. + + Raise ValueError if the Borg repository ID cannot be determined. 
''' if check_arguments.repository and not borgmatic.config.validate.repositories_match( repository, check_arguments.repository @@ -34,16 +316,69 @@ def run_check( global_arguments.dry_run, **hook_context, ) + logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks') - borgmatic.borg.check.check_archives( + repository_id = borgmatic.borg.check.get_repository_id( repository['path'], config, local_borg_version, - check_arguments, global_arguments, local_path=local_path, remote_path=remote_path, ) + upgrade_check_times(config, repository_id) + configured_checks = parse_checks(config, check_arguments.only_checks) + archive_filter_flags = borgmatic.borg.check.make_archive_filter_flags( + local_borg_version, config, configured_checks, check_arguments + ) + archives_check_id = make_archives_check_id(archive_filter_flags) + checks = filter_checks_on_frequency( + config, + repository_id, + configured_checks, + check_arguments.force, + archives_check_id, + ) + borg_specific_checks = set(checks).intersection({'repository', 'archives', 'data'}) + + if borg_specific_checks: + borgmatic.borg.check.check_archives( + repository['path'], + config, + local_borg_version, + check_arguments, + global_arguments, + borg_specific_checks, + archive_filter_flags, + local_path=local_path, + remote_path=remote_path, + ) + for check in borg_specific_checks: + write_check_time( + make_check_time_path(config, repository_id, check, archives_check_id) + ) + + if 'extract' in checks: + borgmatic.borg.extract.extract_last_archive_dry_run( + config, + local_borg_version, + global_arguments, + repository['path'], + config.get('lock_wait'), + local_path, + remote_path, + ) + write_check_time(make_check_time_path(config, repository_id, 'extract')) + + #if 'spot' in checks: + # TODO: + # count the number of files in source directories + # in a loop until the sample percentage (of the total source files) is met: + # pick a random file from source directories and calculate its sha256 sum + # extract the file from the latest archive (to stdout) and calculate its sha256 sum + # if the two checksums are equal, increment the matching files count + # if the percentage of matching files (of the total source files) < tolerance percentage, error + borgmatic.hooks.command.execute_hook( config.get('after_check'), config.get('umask'), diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py index 034ed61..84808ab 100644 --- a/borgmatic/borg/check.py +++ b/borgmatic/borg/check.py @@ -1,172 +1,28 @@ import argparse -import datetime -import hashlib -import itertools import json import logging import os -import pathlib -from borgmatic.borg import environment, extract, feature, flags, rinfo, state +from borgmatic.borg import environment, feature, flags, rinfo from borgmatic.execute import DO_NOT_CAPTURE, execute_command -DEFAULT_CHECKS = ( - {'name': 'repository', 'frequency': '1 month'}, - {'name': 'archives', 'frequency': '1 month'}, -) - logger = logging.getLogger(__name__) -def parse_checks(config, only_checks=None): +def make_archive_filter_flags(local_borg_version, config, checks, check_arguments): ''' - Given a configuration dict with a "checks" sequence of dicts and an optional list of override - checks, return a tuple of named checks to run. + Given the local Borg version, a configuration dict, a parsed sequence of checks, and check + arguments as an argparse.Namespace instance, transform the checks into tuple of command-line + flags for filtering archives in a check command. 
- For example, given a config of: - - {'checks': ({'name': 'repository'}, {'name': 'archives'})} - - This will be returned as: - - ('repository', 'archives') - - If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value - has a name of "disabled", return an empty tuple, meaning that no checks should be run. + If "check_last" is set in the configuration and "archives" is in checks, then include a "--last" + flag. And if "prefix" is set in configuration and "archives" is in checks, then include a + "--match-archives" flag. ''' - checks = only_checks or tuple( - check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS) - ) - checks = tuple(check.lower() for check in checks) + check_last = config.get('check_last', None) + prefix = config.get('prefix') - if 'disabled' in checks: - logger.warning( - 'The "disabled" value for the "checks" option is deprecated and will be removed from a future release; use "skip_actions" instead' - ) - if len(checks) > 1: - logger.warning( - 'Multiple checks are configured, but one of them is "disabled"; not running any checks' - ) - return () - - return checks - - -def parse_frequency(frequency): - ''' - Given a frequency string with a number and a unit of time, return a corresponding - datetime.timedelta instance or None if the frequency is None or "always". - - For instance, given "3 weeks", return datetime.timedelta(weeks=3) - - Raise ValueError if the given frequency cannot be parsed. - ''' - if not frequency: - return None - - frequency = frequency.strip().lower() - - if frequency == 'always': - return None - - try: - number, time_unit = frequency.split(' ') - number = int(number) - except ValueError: - raise ValueError(f"Could not parse consistency check frequency '{frequency}'") - - if not time_unit.endswith('s'): - time_unit += 's' - - if time_unit == 'months': - number *= 30 - time_unit = 'days' - elif time_unit == 'years': - number *= 365 - time_unit = 'days' - - try: - return datetime.timedelta(**{time_unit: number}) - except TypeError: - raise ValueError(f"Could not parse consistency check frequency '{frequency}'") - - -def filter_checks_on_frequency( - config, - borg_repository_id, - checks, - force, - archives_check_id=None, -): - ''' - Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence - of checks, whether to force checks to run, and an ID for the archives check potentially being - run (if any), filter down those checks based on the configured "frequency" for each check as - compared to its check time file. - - In other words, a check whose check time file's timestamp is too new (based on the configured - frequency) will get cut from the returned sequence of checks. Example: - - config = { - 'checks': [ - { - 'name': 'archives', - 'frequency': '2 weeks', - }, - ] - } - - When this function is called with that config and "archives" in checks, "archives" will get - filtered out of the returned result if its check time file is newer than 2 weeks old, indicating - that it's not yet time to run that check again. - - Raise ValueError if a frequency cannot be parsed. 
- ''' - if not checks: - return checks - - filtered_checks = list(checks) - - if force: - return tuple(filtered_checks) - - for check_config in config.get('checks', DEFAULT_CHECKS): - check = check_config['name'] - if checks and check not in checks: - continue - - frequency_delta = parse_frequency(check_config.get('frequency')) - if not frequency_delta: - continue - - check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id) - if not check_time: - continue - - # If we've not yet reached the time when the frequency dictates we're ready for another - # check, skip this check. - if datetime.datetime.now() < check_time + frequency_delta: - remaining = check_time + frequency_delta - datetime.datetime.now() - logger.info( - f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)' - ) - filtered_checks.remove(check) - - return tuple(filtered_checks) - - -def make_archive_filter_flags( - local_borg_version, config, checks, check_arguments, check_last=None, prefix=None -): - ''' - Given the local Borg version, a configuration dict, a parsed sequence of checks, check arguments - as an argparse.Namespace instance, the check last value, and a consistency check prefix, - transform the checks into tuple of command-line flags for filtering archives in a check command. - - If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if - a prefix value is given and "archives" is in checks, then include a "--match-archives" flag. - ''' if 'archives' in checks or 'data' in checks: return (('--last', str(check_last)) if check_last else ()) + ( ( @@ -196,17 +52,6 @@ def make_archive_filter_flags( return () -def make_archives_check_id(archive_filter_flags): - ''' - Given a sequence of flags to filter archives, return a unique hash corresponding to those - particular flags. If there are no flags, return None. - ''' - if not archive_filter_flags: - return None - - return hashlib.sha256(' '.join(archive_filter_flags).encode()).hexdigest() - - def make_check_flags(checks, archive_filter_flags): ''' Given a parsed sequence of checks and a sequence of flags to filter archives, transform the @@ -240,144 +85,15 @@ def make_check_flags(checks, archive_filter_flags): ) -def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None): +def get_repository_id(repository_path, config, local_borg_version, global_arguments, local_path, remote_path): ''' - Given a configuration dict, a Borg repository ID, the name of a check type ("repository", - "archives", etc.), and a unique hash of the archives filter flags, return a path for recording - that check's time (the time of that check last occurring). - ''' - borgmatic_source_directory = os.path.expanduser( - config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) - ) + Given a local or remote repository path, a configuration dict, the local Borg version, global + arguments, and local/remote commands to run, return the corresponding Borg repository ID. - if check_type in ('archives', 'data'): - return os.path.join( - borgmatic_source_directory, - 'checks', - borg_repository_id, - check_type, - archives_check_id if archives_check_id else 'all', - ) - - return os.path.join( - borgmatic_source_directory, - 'checks', - borg_repository_id, - check_type, - ) - - -def write_check_time(path): # pragma: no cover - ''' - Record a check time of now as the modification time of the given path. 
- ''' - logger.debug(f'Writing check time at {path}') - - os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True) - pathlib.Path(path, mode=0o600).touch() - - -def read_check_time(path): - ''' - Return the check time based on the modification time of the given path. Return None if the path - doesn't exist. - ''' - logger.debug(f'Reading check time from {path}') - - try: - return datetime.datetime.fromtimestamp(os.stat(path).st_mtime) - except FileNotFoundError: - return None - - -def probe_for_check_time(config, borg_repository_id, check, archives_check_id): - ''' - Given a configuration dict, a Borg repository ID, the name of a check type ("repository", - "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding - check time or None if such a check time does not exist. - - When the check type is "archives" or "data", this function probes two different paths to find - the check time, e.g.: - - ~/.borgmatic/checks/1234567890/archives/9876543210 - ~/.borgmatic/checks/1234567890/archives/all - - ... and returns the maximum modification time of the files found (if any). The first path - represents a more specific archives check time (a check on a subset of archives), and the second - is a fallback to the last "all" archives check. - - For other check types, this function reads from a single check time path, e.g.: - - ~/.borgmatic/checks/1234567890/repository - ''' - check_times = ( - read_check_time(group[0]) - for group in itertools.groupby( - ( - make_check_time_path(config, borg_repository_id, check, archives_check_id), - make_check_time_path(config, borg_repository_id, check), - ) - ) - ) - - try: - return max(check_time for check_time in check_times if check_time) - except ValueError: - return None - - -def upgrade_check_times(config, borg_repository_id): - ''' - Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on - disk from old-style paths to new-style paths. - - Currently, the only upgrade performed is renaming an archive or data check path that looks like: - - ~/.borgmatic/checks/1234567890/archives - - to: - - ~/.borgmatic/checks/1234567890/archives/all - ''' - for check_type in ('archives', 'data'): - new_path = make_check_time_path(config, borg_repository_id, check_type, 'all') - old_path = os.path.dirname(new_path) - temporary_path = f'{old_path}.temp' - - if not os.path.isfile(old_path) and not os.path.isfile(temporary_path): - continue - - logger.debug(f'Upgrading archives check time from {old_path} to {new_path}') - - try: - os.rename(old_path, temporary_path) - except FileNotFoundError: - pass - - os.mkdir(old_path) - os.rename(temporary_path, new_path) - - -def check_archives( - repository_path, - config, - local_borg_version, - check_arguments, - global_arguments, - local_path='borg', - remote_path=None, -): - ''' - Given a local or remote repository path, a configuration dict, the local Borg version, check - arguments as an argparse.Namespace instance, global arguments, and local/remote commands to run, - check the contained Borg archives for consistency. - - If there are no consistency checks to run, skip running them. - - Raises ValueError if the Borg repository ID cannot be determined. + Raise ValueError if the Borg repository ID cannot be determined. 
''' try: - borg_repository_id = json.loads( + return json.loads( rinfo.display_repository_info( repository_path, config, @@ -391,82 +107,63 @@ def check_archives( except (json.JSONDecodeError, KeyError): raise ValueError(f'Cannot determine Borg repository ID for {repository_path}') - upgrade_check_times(config, borg_repository_id) - check_last = config.get('check_last', None) - prefix = config.get('prefix') - configured_checks = parse_checks(config, check_arguments.only_checks) - lock_wait = None +def check_archives( + repository_path, + config, + local_borg_version, + check_arguments, + global_arguments, + checks, + archive_filter_flags, + local_path='borg', + remote_path=None, +): + ''' + Given a local or remote repository path, a configuration dict, the local Borg version, check + arguments as an argparse.Namespace instance, global arguments, a set of named Borg checks to run + (some combination "repository", "archives", and/or "data"), archive filter flags, and + local/remote commands to run, check the contained Borg archives for consistency. + ''' + lock_wait = config.get('lock_wait') extra_borg_options = config.get('extra_borg_options', {}).get('check', '') - archive_filter_flags = make_archive_filter_flags( - local_borg_version, config, configured_checks, check_arguments, check_last, prefix - ) - archives_check_id = make_archives_check_id(archive_filter_flags) - checks = filter_checks_on_frequency( - config, - borg_repository_id, - configured_checks, - check_arguments.force, - archives_check_id, + verbosity_flags = () + if logger.isEnabledFor(logging.INFO): + verbosity_flags = ('--info',) + if logger.isEnabledFor(logging.DEBUG): + verbosity_flags = ('--debug', '--show-rc') + + full_command = ( + (local_path, 'check') + + (('--repair',) if check_arguments.repair else ()) + + make_check_flags(checks, archive_filter_flags) + + (('--remote-path', remote_path) if remote_path else ()) + + (('--log-json',) if global_arguments.log_json else ()) + + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + + verbosity_flags + + (('--progress',) if check_arguments.progress else ()) + + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + + flags.make_repository_flags(repository_path, local_borg_version) ) - if set(checks).intersection({'repository', 'archives', 'data'}): - lock_wait = config.get('lock_wait') + borg_environment = environment.make_environment(config) + borg_exit_codes = config.get('borg_exit_codes') - verbosity_flags = () - if logger.isEnabledFor(logging.INFO): - verbosity_flags = ('--info',) - if logger.isEnabledFor(logging.DEBUG): - verbosity_flags = ('--debug', '--show-rc') - - full_command = ( - (local_path, 'check') - + (('--repair',) if check_arguments.repair else ()) - + make_check_flags(checks, archive_filter_flags) - + (('--remote-path', remote_path) if remote_path else ()) - + (('--log-json',) if global_arguments.log_json else ()) - + (('--lock-wait', str(lock_wait)) if lock_wait else ()) - + verbosity_flags - + (('--progress',) if check_arguments.progress else ()) - + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) - + flags.make_repository_flags(repository_path, local_borg_version) + # The Borg repair option triggers an interactive prompt, which won't work when output is + # captured. And progress messes with the terminal directly. 
+ if check_arguments.repair or check_arguments.progress: + execute_command( + full_command, + output_file=DO_NOT_CAPTURE, + extra_environment=borg_environment, + borg_local_path=local_path, + borg_exit_codes=borg_exit_codes, ) - - borg_environment = environment.make_environment(config) - borg_exit_codes = config.get('borg_exit_codes') - - # The Borg repair option triggers an interactive prompt, which won't work when output is - # captured. And progress messes with the terminal directly. - if check_arguments.repair or check_arguments.progress: - execute_command( - full_command, - output_file=DO_NOT_CAPTURE, - extra_environment=borg_environment, - borg_local_path=local_path, - borg_exit_codes=borg_exit_codes, - ) - else: - execute_command( - full_command, - extra_environment=borg_environment, - borg_local_path=local_path, - borg_exit_codes=borg_exit_codes, - ) - - for check in checks: - write_check_time( - make_check_time_path(config, borg_repository_id, check, archives_check_id) - ) - - if 'extract' in checks: - extract.extract_last_archive_dry_run( - config, - local_borg_version, - global_arguments, - repository_path, - lock_wait, - local_path, - remote_path, + else: + execute_command( + full_command, + extra_environment=borg_environment, + borg_local_path=local_path, + borg_exit_codes=borg_exit_codes, ) - write_check_time(make_check_time_path(config, borg_repository_id, 'extract')) diff --git a/borgmatic/config/generate.py b/borgmatic/config/generate.py index ad5eaea..85313c7 100644 --- a/borgmatic/config/generate.py +++ b/borgmatic/config/generate.py @@ -21,6 +21,19 @@ def insert_newline_before_comment(config, field_name): ) +def get_properties(schema): + ''' + Given a schema dict, return its properties. But if it's got sub-schemas with multiple different + potential properties, returned their merged properties instead. + ''' + if 'oneOf' in schema: + return dict( + collections.ChainMap(*[sub_schema['properties'] for sub_schema in schema['oneOf']]) + ) + + return schema['properties'] + + def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): ''' Given a loaded configuration schema, generate and return sample config for it. Include comments @@ -40,7 +53,7 @@ def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): config = ruamel.yaml.comments.CommentedMap( [ (field_name, schema_to_sample_configuration(sub_schema, level + 1)) - for field_name, sub_schema in schema['properties'].items() + for field_name, sub_schema in get_properties(schema).items() ] ) indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0) @@ -151,7 +164,7 @@ def add_comments_to_configuration_sequence(config, schema, indent=0): return for field_name in config[0].keys(): - field_schema = schema['items']['properties'].get(field_name, {}) + field_schema = get_properties(schema['items']).get(field_name, {}) description = field_schema.get('description') # No description to use? Skip it. 
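
Note on the refactor above: the new get_properties() helper merges the "properties" of every "oneOf" sub-schema via collections.ChainMap, which is what lets the sample-config generator and the comment-adding code handle the new two-variant "checks" schema. A minimal standalone illustration follows; the function body is copied from the hunk above, while the example schema and printed output are hypothetical, mirroring the two "checks" variants added to schema.yaml in this patch.

import collections

def get_properties(schema):
    # Merge the "properties" of all "oneOf" sub-schemas; ChainMap gives
    # earlier sub-schemas precedence for duplicate property names.
    if 'oneOf' in schema:
        return dict(
            collections.ChainMap(*[sub_schema['properties'] for sub_schema in schema['oneOf']])
        )

    return schema['properties']

# Hypothetical schema mirroring the two "checks" variants in schema.yaml.
checks_item_schema = {
    'oneOf': [
        {'properties': {'name': {}, 'frequency': {}}},
        {
            'properties': {
                'name': {},
                'frequency': {},
                'sample_percentage': {},
                'tolerance_percentage': {},
            }
        },
    ]
}

print(sorted(get_properties(checks_item_schema)))
# ['frequency', 'name', 'sample_percentage', 'tolerance_percentage']
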
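
The commented-out "spot" check TODO in the run_check() hunk earlier in this patch outlines the planned algorithm: sample a percentage of source files, hash each one, extract the same file from the latest archive to stdout and hash it, then fail if too many files differ. Below is a rough, hypothetical sketch of that outline only, not the eventual borgmatic implementation; the borg invocation, path handling, and error handling are simplified assumptions.

import hashlib
import os
import random
import subprocess

def spot_check(source_directories, repository, archive, sample_percentage, tolerance_percentage):
    # Count all regular files under the configured source directories.
    source_files = [
        os.path.join(dirpath, filename)
        for directory in source_directories
        for dirpath, _, filenames in os.walk(directory)
        for filename in filenames
    ]

    if not source_files:
        raise ValueError('No source files found to spot check')

    sample_size = max(1, int(len(source_files) * sample_percentage / 100))
    failing_files = 0

    for path in random.sample(source_files, sample_size):
        with open(path, 'rb') as source_file:
            source_digest = hashlib.sha256(source_file.read()).hexdigest()

        # Extract the same file from the latest archive to stdout and hash that too.
        extracted = subprocess.run(
            ('borg', 'extract', '--stdout', f'{repository}::{archive}', path.lstrip(os.path.sep)),
            stdout=subprocess.PIPE,
            check=True,
        )
        archive_digest = hashlib.sha256(extracted.stdout).hexdigest()

        if source_digest != archive_digest:
            failing_files += 1

    # Per the schema description, tolerance_percentage is relative to the total
    # number of source files, not just the sampled ones.
    if failing_files * 100 / len(source_files) > tolerance_percentage:
        raise ValueError('Spot check failed: tolerance percentage exceeded')
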
@@ -178,7 +191,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa if skip_first and index == 0: continue - field_schema = schema['properties'].get(field_name, {}) + field_schema = get_properties(schema).get(field_name, {}) description = field_schema.get('description', '').strip() # If this is an optional key, add an indicator to the comment flagging it to be commented diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml index 61bdb6e..9e4759b 100644 --- a/borgmatic/config/schema.yaml +++ b/borgmatic/config/schema.yaml @@ -503,37 +503,99 @@ properties: type: array items: type: object - required: ['name'] - additionalProperties: false - properties: - name: - type: string - enum: - - repository - - archives - - data - - extract - - disabled - description: | - Name of consistency check to run: "repository", - "archives", "data", and/or "extract". "repository" - checks the consistency of the repository, "archives" - checks all of the archives, "data" verifies the - integrity of the data within the archives, and "extract" - does an extraction dry-run of the most recent archive. - Note that "data" implies "archives". See "skip_actions" - for disabling checks altogether. - example: repository - frequency: - type: string - description: | - How frequently to run this type of consistency check (as - a best effort). The value is a number followed by a unit - of time. E.g., "2 weeks" to run this consistency check - no more than every two weeks for a given repository or - "1 month" to run it no more than monthly. Defaults to - "always": running this check every time checks are run. - example: 2 weeks + oneOf: + - required: [name] + additionalProperties: false + properties: + name: + type: string + enum: + - repository + - archives + - data + - extract + - disabled + description: | + Name of consistency check to run: "repository", + "archives", "data", "spot", and/or "extract". + "repository" checks the consistency of the + repository, "archives" checks all of the + archives, "data" verifies the integrity of the + data within the archives, "spot" checks that + some percentage of source files are found in the + most recent archive (with identical contents), + and "extract" does an extraction dry-run of the + most recent archive. Note that "data" implies + "archives". See "skip_actions" for disabling + checks altogether. + example: spot + frequency: + type: string + description: | + How frequently to run this type of consistency + check (as a best effort). The value is a number + followed by a unit of time. E.g., "2 weeks" to + run this consistency check no more than every + two weeks for a given repository or "1 month" to + run it no more than monthly. Defaults to + "always": running this check every time checks + are run. + example: 2 weeks + - required: + - name + - sample_percentage + - tolerance_percentage + additionalProperties: false + properties: + name: + type: string + enum: + - spot + description: | + Name of consistency check to run: "repository", + "archives", "data", "spot", and/or "extract". + "repository" checks the consistency of the + repository, "archives" checks all of the + archives, "data" verifies the integrity of the + data within the archives, "spot" checks that + some percentage of source files are found in the + most recent archive (with identical contents), + and "extract" does an extraction dry-run of the + most recent archive. Note that "data" implies + "archives". See "skip_actions" for disabling + checks altogether. 
+ example: repository + frequency: + type: string + description: | + How frequently to run this type of consistency + check (as a best effort). The value is a number + followed by a unit of time. E.g., "2 weeks" to + run this consistency check no more than every + two weeks for a given repository or "1 month" to + run it no more than monthly. Defaults to + "always": running this check every time checks + are run. + example: 2 weeks + sample_percentage: + type: number + description: | + The percentage of total files in the source + directories to randomly sample and compare to + their corresponding files in the most recent + backup archive. Only applies to the "spot" + check. + example: 5 + tolerance_percentage: + type: number + description: | + The percentage of total files in the source + directories that can fail a spot check + comparison without failing the entire + consistency check. Should be lower than or + equal to the "sample_percentage". Only applies + to the "spot" check. + example: 0.5 description: | List of one or more consistency checks to run on a periodic basis (if "frequency" is set) or every time borgmatic runs checks (if diff --git a/tests/unit/actions/test_check.py b/tests/unit/actions/test_check.py index 72798e0..83c840f 100644 --- a/tests/unit/actions/test_check.py +++ b/tests/unit/actions/test_check.py @@ -1,18 +1,433 @@ from flexmock import flexmock +import pytest from borgmatic.actions import check as module -def test_run_check_calls_hooks_for_configured_repository(): +def test_parse_checks_returns_them_as_tuple(): + checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'bar'}]}) + + assert checks == ('foo', 'bar') + + +def test_parse_checks_with_missing_value_returns_defaults(): + checks = module.parse_checks({}) + + assert checks == ('repository', 'archives') + + +def test_parse_checks_with_empty_list_returns_defaults(): + checks = module.parse_checks({'checks': []}) + + assert checks == ('repository', 'archives') + + +def test_parse_checks_with_none_value_returns_defaults(): + checks = module.parse_checks({'checks': None}) + + assert checks == ('repository', 'archives') + + +def test_parse_checks_with_disabled_returns_no_checks(): + checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'disabled'}]}) + + assert checks == () + + +def test_parse_checks_prefers_override_checks_to_configured_checks(): + checks = module.parse_checks( + {'checks': [{'name': 'archives'}]}, only_checks=['repository', 'extract'] + ) + + assert checks == ('repository', 'extract') + + +@pytest.mark.parametrize( + 'frequency,expected_result', + ( + (None, None), + ('always', None), + ('1 hour', module.datetime.timedelta(hours=1)), + ('2 hours', module.datetime.timedelta(hours=2)), + ('1 day', module.datetime.timedelta(days=1)), + ('2 days', module.datetime.timedelta(days=2)), + ('1 week', module.datetime.timedelta(weeks=1)), + ('2 weeks', module.datetime.timedelta(weeks=2)), + ('1 month', module.datetime.timedelta(days=30)), + ('2 months', module.datetime.timedelta(days=60)), + ('1 year', module.datetime.timedelta(days=365)), + ('2 years', module.datetime.timedelta(days=365 * 2)), + ), +) +def test_parse_frequency_parses_into_timedeltas(frequency, expected_result): + assert module.parse_frequency(frequency) == expected_result + + +@pytest.mark.parametrize( + 'frequency', + ( + 'sometime', + 'x days', + '3 decades', + ), +) +def test_parse_frequency_raises_on_parse_error(frequency): + with pytest.raises(ValueError): + module.parse_frequency(frequency) + + +def 
test_filter_checks_on_frequency_without_config_uses_default_checks(): + flexmock(module).should_receive('parse_frequency').and_return( + module.datetime.timedelta(weeks=4) + ) + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('probe_for_check_time').and_return(None) + + assert module.filter_checks_on_frequency( + config={}, + borg_repository_id='repo', + checks=('repository', 'archives'), + force=False, + archives_check_id='1234', + ) == ('repository', 'archives') + + +def test_filter_checks_on_frequency_retains_unconfigured_check(): + assert module.filter_checks_on_frequency( + config={}, + borg_repository_id='repo', + checks=('data',), + force=False, + ) == ('data',) + + +def test_filter_checks_on_frequency_retains_check_without_frequency(): + flexmock(module).should_receive('parse_frequency').and_return(None) + + assert module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives'}]}, + borg_repository_id='repo', + checks=('archives',), + force=False, + archives_check_id='1234', + ) == ('archives',) + + +def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency(): + flexmock(module).should_receive('parse_frequency').and_return( + module.datetime.timedelta(hours=1) + ) + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('probe_for_check_time').and_return( + module.datetime.datetime(year=module.datetime.MINYEAR, month=1, day=1) + ) + + assert module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + borg_repository_id='repo', + checks=('archives',), + force=False, + archives_check_id='1234', + ) == ('archives',) + + +def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file(): + flexmock(module).should_receive('parse_frequency').and_return( + module.datetime.timedelta(hours=1) + ) + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('probe_for_check_time').and_return(None) + + assert module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + borg_repository_id='repo', + checks=('archives',), + force=False, + archives_check_id='1234', + ) == ('archives',) + + +def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency(): + flexmock(module).should_receive('parse_frequency').and_return( + module.datetime.timedelta(hours=1) + ) + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('probe_for_check_time').and_return( + module.datetime.datetime.now() + ) + + assert ( + module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + borg_repository_id='repo', + checks=('archives',), + force=False, + archives_check_id='1234', + ) + == () + ) + + +def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force(): + assert module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + borg_repository_id='repo', + checks=('archives',), + force=True, + archives_check_id='1234', + ) == ('archives',) + + +def test_filter_checks_on_frequency_passes_through_empty_checks(): + assert ( + module.filter_checks_on_frequency( + config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, + borg_repository_id='repo', + checks=(), + force=False, + archives_check_id='1234', + ) + == () + ) + + +def test_make_archives_check_id_with_flags_returns_a_value_and_does_not_raise(): + assert 
module.make_archives_check_id(('--match-archives', 'sh:foo-*')) + + +def test_make_archives_check_id_with_empty_flags_returns_none(): + assert module.make_archives_check_id(()) is None + + +def test_make_check_time_path_with_borgmatic_source_directory_includes_it(): + flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( + '/home/user/.borgmatic' + ) + + assert ( + module.make_check_time_path( + {'borgmatic_source_directory': '~/.borgmatic'}, '1234', 'archives', '5678' + ) + == '/home/user/.borgmatic/checks/1234/archives/5678' + ) + + +def test_make_check_time_path_without_borgmatic_source_directory_uses_default(): + flexmock(module.os.path).should_receive('expanduser').with_args( + module.borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY + ).and_return('/home/user/.borgmatic') + + assert ( + module.make_check_time_path({}, '1234', 'archives', '5678') + == '/home/user/.borgmatic/checks/1234/archives/5678' + ) + + +def test_make_check_time_path_with_archives_check_and_no_archives_check_id_defaults_to_all(): + flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( + '/home/user/.borgmatic' + ) + + assert ( + module.make_check_time_path( + {'borgmatic_source_directory': '~/.borgmatic'}, + '1234', + 'archives', + ) + == '/home/user/.borgmatic/checks/1234/archives/all' + ) + + +def test_make_check_time_path_with_repositories_check_ignores_archives_check_id(): + flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( + '/home/user/.borgmatic' + ) + + assert ( + module.make_check_time_path( + {'borgmatic_source_directory': '~/.borgmatic'}, '1234', 'repository', '5678' + ) + == '/home/user/.borgmatic/checks/1234/repository' + ) + + +def test_read_check_time_does_not_raise(): + flexmock(module.os).should_receive('stat').and_return(flexmock(st_mtime=123)) + + assert module.read_check_time('/path') + + +def test_read_check_time_on_missing_file_does_not_raise(): + flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError) + + assert module.read_check_time('/path') is None + + +def test_probe_for_check_time_uses_maximum_of_multiple_check_times(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/5678' + ).and_return('~/.borgmatic/checks/1234/archives/all') + flexmock(module).should_receive('read_check_time').and_return(1).and_return(2) + + assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 2 + + +def test_probe_for_check_time_deduplicates_identical_check_time_paths(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/5678' + ).and_return('~/.borgmatic/checks/1234/archives/5678') + flexmock(module).should_receive('read_check_time').and_return(1).once() + + assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 1 + + +def test_probe_for_check_time_skips_none_check_time(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/5678' + ).and_return('~/.borgmatic/checks/1234/archives/all') + flexmock(module).should_receive('read_check_time').and_return(None).and_return(2) + + assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 2 + + +def test_probe_for_check_time_uses_single_check_time(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/5678' + 
).and_return('~/.borgmatic/checks/1234/archives/all') + flexmock(module).should_receive('read_check_time').and_return(1).and_return(None) + + assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 1 + + +def test_probe_for_check_time_returns_none_when_no_check_time_found(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/5678' + ).and_return('~/.borgmatic/checks/1234/archives/all') + flexmock(module).should_receive('read_check_time').and_return(None).and_return(None) + + assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) is None + + +def test_upgrade_check_times_renames_old_check_paths_to_all(): + base_path = '~/.borgmatic/checks/1234' + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'archives', 'all' + ).and_return(f'{base_path}/archives/all') + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'data', 'all' + ).and_return(f'{base_path}/data/all') + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( + True + ) + flexmock(module.os.path).should_receive('isfile').with_args( + f'{base_path}/archives.temp' + ).and_return(False) + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( + False + ) + flexmock(module.os.path).should_receive('isfile').with_args( + f'{base_path}/data.temp' + ).and_return(False) + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/archives', f'{base_path}/archives.temp' + ).once() + flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/archives').once() + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/archives.temp', f'{base_path}/archives/all' + ).once() + + module.upgrade_check_times(flexmock(), flexmock()) + + +def test_upgrade_check_times_renames_data_check_paths_when_archives_paths_are_already_upgraded(): + base_path = '~/.borgmatic/checks/1234' + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'archives', 'all' + ).and_return(f'{base_path}/archives/all') + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'data', 'all' + ).and_return(f'{base_path}/data/all') + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( + False + ) + flexmock(module.os.path).should_receive('isfile').with_args( + f'{base_path}/archives.temp' + ).and_return(False) + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( + True + ) + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/data', f'{base_path}/data.temp' + ).once() + flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/data').once() + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/data.temp', f'{base_path}/data/all' + ).once() + + module.upgrade_check_times(flexmock(), flexmock()) + + +def test_upgrade_check_times_skips_missing_check_paths(): + flexmock(module).should_receive('make_check_time_path').and_return( + '~/.borgmatic/checks/1234/archives/all' + ) + flexmock(module.os.path).should_receive('isfile').and_return(False) + flexmock(module.os).should_receive('rename').never() + flexmock(module.os).should_receive('mkdir').never() + + module.upgrade_check_times(flexmock(), flexmock()) + + +def test_upgrade_check_times_renames_stale_temporary_check_path(): + base_path = 
'~/.borgmatic/checks/1234' + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'archives', 'all' + ).and_return(f'{base_path}/archives/all') + flexmock(module).should_receive('make_check_time_path').with_args( + object, object, 'data', 'all' + ).and_return(f'{base_path}/data/all') + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( + False + ) + flexmock(module.os.path).should_receive('isfile').with_args( + f'{base_path}/archives.temp' + ).and_return(True) + flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( + False + ) + flexmock(module.os.path).should_receive('isfile').with_args( + f'{base_path}/data.temp' + ).and_return(False) + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/archives', f'{base_path}/archives.temp' + ).and_raise(FileNotFoundError) + flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/archives').once() + flexmock(module.os).should_receive('rename').with_args( + f'{base_path}/archives.temp', f'{base_path}/archives/all' + ).once() + + module.upgrade_check_times(flexmock(), flexmock()) + + +def test_run_check_checks_archives_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() + flexmock(module.borgmatic.borg.check).should_receive('get_repository_id').and_return(flexmock()) + flexmock(module).should_receive('upgrade_check_times') + flexmock(module).should_receive('parse_checks') + flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) + flexmock(module).should_receive('make_archives_check_id').and_return(None) + flexmock(module).should_receive('filter_checks_on_frequency').and_return({'repository', 'archives'}) flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('write_check_time') + flexmock(module.borgmatic.borg.extract).should_receive('extract_last_archive_dry_run').never() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) check_arguments = flexmock( repository=None, progress=flexmock(), repair=flexmock(), - only=flexmock(), + only_checks=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) @@ -30,17 +445,98 @@ def test_run_check_calls_hooks_for_configured_repository(): ) -def test_run_check_runs_with_selected_repository(): +def test_run_check_runs_configured_extract_check(): + flexmock(module.logger).answer = lambda message: None + flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() + flexmock(module.borgmatic.borg.check).should_receive('get_repository_id').and_return(flexmock()) + flexmock(module).should_receive('upgrade_check_times') + flexmock(module).should_receive('parse_checks') + flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) + flexmock(module).should_receive('make_archives_check_id').and_return(None) + flexmock(module).should_receive('filter_checks_on_frequency').and_return({'extract'}) + flexmock(module.borgmatic.borg.check).should_receive('check_archives').never() + flexmock(module.borgmatic.borg.extract).should_receive('extract_last_archive_dry_run').once() + flexmock(module).should_receive('make_check_time_path') + 
flexmock(module).should_receive('write_check_time') + flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) + check_arguments = flexmock( + repository=None, + progress=flexmock(), + repair=flexmock(), + only_checks=flexmock(), + force=flexmock(), + ) + global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) + + module.run_check( + config_filename='test.yaml', + repository={'path': 'repo'}, + config={'repositories': ['repo']}, + hook_context={}, + local_borg_version=None, + check_arguments=check_arguments, + global_arguments=global_arguments, + local_path=None, + remote_path=None, + ) + + +def test_run_check_without_checks_runs_nothing_except_hooks(): + flexmock(module.logger).answer = lambda message: None + flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() + flexmock(module.borgmatic.borg.check).should_receive('get_repository_id').and_return(flexmock()) + flexmock(module).should_receive('upgrade_check_times') + flexmock(module).should_receive('parse_checks') + flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) + flexmock(module).should_receive('make_archives_check_id').and_return(None) + flexmock(module).should_receive('filter_checks_on_frequency').and_return({}) + flexmock(module.borgmatic.borg.check).should_receive('check_archives').never() + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('write_check_time').never() + flexmock(module.borgmatic.borg.extract).should_receive('extract_last_archive_dry_run').never() + flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) + check_arguments = flexmock( + repository=None, + progress=flexmock(), + repair=flexmock(), + only_checks=flexmock(), + force=flexmock(), + ) + global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) + + module.run_check( + config_filename='test.yaml', + repository={'path': 'repo'}, + config={'repositories': ['repo']}, + hook_context={}, + local_borg_version=None, + check_arguments=check_arguments, + global_arguments=global_arguments, + local_path=None, + remote_path=None, + ) + + +def test_run_check_checks_archives_in_selected_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(True) + flexmock(module.borgmatic.borg.check).should_receive('get_repository_id').and_return(flexmock()) + flexmock(module).should_receive('upgrade_check_times') + flexmock(module).should_receive('parse_checks') + flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) + flexmock(module).should_receive('make_archives_check_id').and_return(None) + flexmock(module).should_receive('filter_checks_on_frequency').and_return({'repository', 'archives'}) flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('write_check_time') + flexmock(module.borgmatic.borg.extract).should_receive('extract_last_archive_dry_run').never() check_arguments = flexmock( repository=flexmock(), progress=flexmock(), repair=flexmock(), - only=flexmock(), + only_checks=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) @@ -68,7 +564,7 @@ def test_run_check_bails_if_repository_does_not_match(): repository=flexmock(), progress=flexmock(), repair=flexmock(), - 
only=flexmock(), + only_checks=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) diff --git a/tests/unit/borg/test_check.py b/tests/unit/borg/test_check.py index 323c338..280dc93 100644 --- a/tests/unit/borg/test_check.py +++ b/tests/unit/borg/test_check.py @@ -22,203 +22,15 @@ def insert_execute_command_never(): flexmock(module).should_receive('execute_command').never() -def test_parse_checks_returns_them_as_tuple(): - checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'bar'}]}) - - assert checks == ('foo', 'bar') - - -def test_parse_checks_with_missing_value_returns_defaults(): - checks = module.parse_checks({}) - - assert checks == ('repository', 'archives') - - -def test_parse_checks_with_empty_list_returns_defaults(): - checks = module.parse_checks({'checks': []}) - - assert checks == ('repository', 'archives') - - -def test_parse_checks_with_none_value_returns_defaults(): - checks = module.parse_checks({'checks': None}) - - assert checks == ('repository', 'archives') - - -def test_parse_checks_with_disabled_returns_no_checks(): - checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'disabled'}]}) - - assert checks == () - - -def test_parse_checks_prefers_override_checks_to_configured_checks(): - checks = module.parse_checks( - {'checks': [{'name': 'archives'}]}, only_checks=['repository', 'extract'] - ) - - assert checks == ('repository', 'extract') - - -@pytest.mark.parametrize( - 'frequency,expected_result', - ( - (None, None), - ('always', None), - ('1 hour', module.datetime.timedelta(hours=1)), - ('2 hours', module.datetime.timedelta(hours=2)), - ('1 day', module.datetime.timedelta(days=1)), - ('2 days', module.datetime.timedelta(days=2)), - ('1 week', module.datetime.timedelta(weeks=1)), - ('2 weeks', module.datetime.timedelta(weeks=2)), - ('1 month', module.datetime.timedelta(days=30)), - ('2 months', module.datetime.timedelta(days=60)), - ('1 year', module.datetime.timedelta(days=365)), - ('2 years', module.datetime.timedelta(days=365 * 2)), - ), -) -def test_parse_frequency_parses_into_timedeltas(frequency, expected_result): - assert module.parse_frequency(frequency) == expected_result - - -@pytest.mark.parametrize( - 'frequency', - ( - 'sometime', - 'x days', - '3 decades', - ), -) -def test_parse_frequency_raises_on_parse_error(frequency): - with pytest.raises(ValueError): - module.parse_frequency(frequency) - - -def test_filter_checks_on_frequency_without_config_uses_default_checks(): - flexmock(module).should_receive('parse_frequency').and_return( - module.datetime.timedelta(weeks=4) - ) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('probe_for_check_time').and_return(None) - - assert module.filter_checks_on_frequency( - config={}, - borg_repository_id='repo', - checks=('repository', 'archives'), - force=False, - archives_check_id='1234', - ) == ('repository', 'archives') - - -def test_filter_checks_on_frequency_retains_unconfigured_check(): - assert module.filter_checks_on_frequency( - config={}, - borg_repository_id='repo', - checks=('data',), - force=False, - ) == ('data',) - - -def test_filter_checks_on_frequency_retains_check_without_frequency(): - flexmock(module).should_receive('parse_frequency').and_return(None) - - assert module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives'}]}, - borg_repository_id='repo', - checks=('archives',), - force=False, - archives_check_id='1234', - ) == ('archives',) - - -def 
test_filter_checks_on_frequency_retains_check_with_elapsed_frequency(): - flexmock(module).should_receive('parse_frequency').and_return( - module.datetime.timedelta(hours=1) - ) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('probe_for_check_time').and_return( - module.datetime.datetime(year=module.datetime.MINYEAR, month=1, day=1) - ) - - assert module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, - borg_repository_id='repo', - checks=('archives',), - force=False, - archives_check_id='1234', - ) == ('archives',) - - -def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file(): - flexmock(module).should_receive('parse_frequency').and_return( - module.datetime.timedelta(hours=1) - ) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('probe_for_check_time').and_return(None) - - assert module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, - borg_repository_id='repo', - checks=('archives',), - force=False, - archives_check_id='1234', - ) == ('archives',) - - -def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency(): - flexmock(module).should_receive('parse_frequency').and_return( - module.datetime.timedelta(hours=1) - ) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('probe_for_check_time').and_return( - module.datetime.datetime.now() - ) - - assert ( - module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, - borg_repository_id='repo', - checks=('archives',), - force=False, - archives_check_id='1234', - ) - == () - ) - - -def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force(): - assert module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, - borg_repository_id='repo', - checks=('archives',), - force=True, - archives_check_id='1234', - ) == ('archives',) - - -def test_filter_checks_on_frequency_passes_through_empty_checks(): - assert ( - module.filter_checks_on_frequency( - config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, - borg_repository_id='repo', - checks=(), - force=False, - archives_check_id='1234', - ) - == () - ) - - def test_make_archive_filter_flags_with_default_checks_and_prefix_returns_default_flags(): flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( '1.2.3', - {}, + {'prefix': 'foo'}, ('repository', 'archives'), check_arguments=flexmock(match_archives=None), - prefix='foo', ) assert flags == ('--match-archives', 'sh:foo*') @@ -230,10 +42,9 @@ def test_make_archive_filter_flags_with_all_checks_and_prefix_returns_default_fl flags = module.make_archive_filter_flags( '1.2.3', - {}, + {'prefix': 'foo'}, ('repository', 'archives', 'extract'), check_arguments=flexmock(match_archives=None), - prefix='foo', ) assert flags == ('--match-archives', 'sh:foo*') @@ -245,10 +56,9 @@ def test_make_archive_filter_flags_with_all_checks_and_prefix_without_borg_featu flags = module.make_archive_filter_flags( '1.2.3', - {}, + {'prefix': 'foo'}, ('repository', 'archives', 'extract'), check_arguments=flexmock(match_archives=None), - prefix='foo', ) assert flags == ('--glob-archives', 'foo*') @@ -259,7 +69,7 @@ def 
test_make_archive_filter_flags_with_archives_check_and_last_includes_last_fl flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('archives',), check_arguments=flexmock(match_archives=None), check_last=3 + '1.2.3', {'check_last': 3}, ('archives',), check_arguments=flexmock(match_archives=None), ) assert flags == ('--last', '3') @@ -270,7 +80,7 @@ def test_make_archive_filter_flags_with_data_check_and_last_includes_last_flag() flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('data',), check_arguments=flexmock(match_archives=None), check_last=3 + '1.2.3', {'check_last': 3}, ('data',), check_arguments=flexmock(match_archives=None), ) assert flags == ('--last', '3') @@ -281,7 +91,7 @@ def test_make_archive_filter_flags_with_repository_check_and_last_omits_last_fla flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('repository',), check_arguments=flexmock(match_archives=None), check_last=3 + '1.2.3', {'check_last': 3}, ('repository',), check_arguments=flexmock(match_archives=None), ) assert flags == () @@ -293,10 +103,9 @@ def test_make_archive_filter_flags_with_default_checks_and_last_includes_last_fl flags = module.make_archive_filter_flags( '1.2.3', - {}, + {'check_last': 3}, ('repository', 'archives'), check_arguments=flexmock(match_archives=None), - check_last=3, ) assert flags == ('--last', '3') @@ -307,7 +116,7 @@ def test_make_archive_filter_flags_with_archives_check_and_prefix_includes_match flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('archives',), check_arguments=flexmock(match_archives=None), prefix='foo-' + '1.2.3', {'prefix': 'foo-'}, ('archives',), check_arguments=flexmock(match_archives=None), ) assert flags == ('--match-archives', 'sh:foo-*') @@ -318,7 +127,7 @@ def test_make_archive_filter_flags_with_data_check_and_prefix_includes_match_arc flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('data',), check_arguments=flexmock(match_archives=None), prefix='foo-' + '1.2.3', {'prefix': 'foo-'}, ('data',), check_arguments=flexmock(match_archives=None), ) assert flags == ('--match-archives', 'sh:foo-*') @@ -332,10 +141,9 @@ def test_make_archive_filter_flags_prefers_check_arguments_match_archives_to_con flags = module.make_archive_filter_flags( '1.2.3', - {'match_archives': 'bar-{now}'}, # noqa: FS003 + {'match_archives': 'bar-{now}', 'prefix': ''}, # noqa: FS003 ('archives',), check_arguments=flexmock(match_archives='baz-*'), - prefix='', ) assert flags == ('--match-archives', 'sh:baz-*') @@ -349,10 +157,9 @@ def test_make_archive_filter_flags_with_archives_check_and_empty_prefix_uses_arc flags = module.make_archive_filter_flags( '1.2.3', - {'archive_name_format': 'bar-{now}'}, # noqa: FS003 + {'archive_name_format': 'bar-{now}', 'prefix': ''}, # noqa: FS003 ('archives',), check_arguments=flexmock(match_archives=None), - prefix='', ) assert flags == ('--match-archives', 'sh:bar-*') @@ -363,7 +170,7 @@ def test_make_archive_filter_flags_with_archives_check_and_none_prefix_omits_mat flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('archives',), 
check_arguments=flexmock(match_archives=None), prefix=None + '1.2.3', {}, ('archives',), check_arguments=flexmock(match_archives=None), ) assert flags == () @@ -374,7 +181,7 @@ def test_make_archive_filter_flags_with_repository_check_and_prefix_omits_match_ flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('repository',), check_arguments=flexmock(match_archives=None), prefix='foo-' + '1.2.3', {'prefix': 'foo-'}, ('repository',), check_arguments=flexmock(match_archives=None), ) assert flags == () @@ -386,23 +193,14 @@ def test_make_archive_filter_flags_with_default_checks_and_prefix_includes_match flags = module.make_archive_filter_flags( '1.2.3', - {}, + {'prefix': 'foo-'}, ('repository', 'archives'), check_arguments=flexmock(match_archives=None), - prefix='foo-', ) assert flags == ('--match-archives', 'sh:foo-*') -def test_make_archives_check_id_with_flags_returns_a_value_and_does_not_raise(): - assert module.make_archives_check_id(('--match-archives', 'sh:foo-*')) - - -def test_make_archives_check_id_with_empty_flags_returns_none(): - assert module.make_archives_check_id(()) is None - - def test_make_check_flags_with_repository_check_returns_flag(): flags = module.make_check_flags(('repository',), ()) @@ -463,227 +261,56 @@ def test_make_check_flags_with_repository_and_data_checks_does_not_return_reposi assert flags == ('--verify-data',) -def test_make_check_time_path_with_borgmatic_source_directory_includes_it(): - flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( - '/home/user/.borgmatic' - ) - - assert ( - module.make_check_time_path( - {'borgmatic_source_directory': '~/.borgmatic'}, '1234', 'archives', '5678' - ) - == '/home/user/.borgmatic/checks/1234/archives/5678' - ) - - -def test_make_check_time_path_without_borgmatic_source_directory_uses_default(): - flexmock(module.os.path).should_receive('expanduser').with_args( - module.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY - ).and_return('/home/user/.borgmatic') - - assert ( - module.make_check_time_path({}, '1234', 'archives', '5678') - == '/home/user/.borgmatic/checks/1234/archives/5678' - ) - - -def test_make_check_time_path_with_archives_check_and_no_archives_check_id_defaults_to_all(): - flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( - '/home/user/.borgmatic' - ) - - assert ( - module.make_check_time_path( - {'borgmatic_source_directory': '~/.borgmatic'}, - '1234', - 'archives', - ) - == '/home/user/.borgmatic/checks/1234/archives/all' - ) - - -def test_make_check_time_path_with_repositories_check_ignores_archives_check_id(): - flexmock(module.os.path).should_receive('expanduser').with_args('~/.borgmatic').and_return( - '/home/user/.borgmatic' - ) - - assert ( - module.make_check_time_path( - {'borgmatic_source_directory': '~/.borgmatic'}, '1234', 'repository', '5678' - ) - == '/home/user/.borgmatic/checks/1234/repository' - ) - - -def test_read_check_time_does_not_raise(): - flexmock(module.os).should_receive('stat').and_return(flexmock(st_mtime=123)) - - assert module.read_check_time('/path') - - -def test_read_check_time_on_missing_file_does_not_raise(): - flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError) - - assert module.read_check_time('/path') is None - - -def test_probe_for_check_time_uses_maximum_of_multiple_check_times(): - flexmock(module).should_receive('make_check_time_path').and_return( - 
'~/.borgmatic/checks/1234/archives/5678' - ).and_return('~/.borgmatic/checks/1234/archives/all') - flexmock(module).should_receive('read_check_time').and_return(1).and_return(2) - - assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 2 - - -def test_probe_for_check_time_deduplicates_identical_check_time_paths(): - flexmock(module).should_receive('make_check_time_path').and_return( - '~/.borgmatic/checks/1234/archives/5678' - ).and_return('~/.borgmatic/checks/1234/archives/5678') - flexmock(module).should_receive('read_check_time').and_return(1).once() - - assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 1 - - -def test_probe_for_check_time_skips_none_check_time(): - flexmock(module).should_receive('make_check_time_path').and_return( - '~/.borgmatic/checks/1234/archives/5678' - ).and_return('~/.borgmatic/checks/1234/archives/all') - flexmock(module).should_receive('read_check_time').and_return(None).and_return(2) - - assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 2 - - -def test_probe_for_check_time_uses_single_check_time(): - flexmock(module).should_receive('make_check_time_path').and_return( - '~/.borgmatic/checks/1234/archives/5678' - ).and_return('~/.borgmatic/checks/1234/archives/all') - flexmock(module).should_receive('read_check_time').and_return(1).and_return(None) - - assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) == 1 - - -def test_probe_for_check_time_returns_none_when_no_check_time_found(): - flexmock(module).should_receive('make_check_time_path').and_return( - '~/.borgmatic/checks/1234/archives/5678' - ).and_return('~/.borgmatic/checks/1234/archives/all') - flexmock(module).should_receive('read_check_time').and_return(None).and_return(None) - - assert module.probe_for_check_time(flexmock(), flexmock(), flexmock(), flexmock()) is None - - -def test_upgrade_check_times_renames_old_check_paths_to_all(): - base_path = '~/.borgmatic/checks/1234' - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'archives', 'all' - ).and_return(f'{base_path}/archives/all') - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'data', 'all' - ).and_return(f'{base_path}/data/all') - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( - True - ) - flexmock(module.os.path).should_receive('isfile').with_args( - f'{base_path}/archives.temp' - ).and_return(False) - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( - False - ) - flexmock(module.os.path).should_receive('isfile').with_args( - f'{base_path}/data.temp' - ).and_return(False) - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/archives', f'{base_path}/archives.temp' - ).once() - flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/archives').once() - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/archives.temp', f'{base_path}/archives/all' - ).once() - - module.upgrade_check_times(flexmock(), flexmock()) - - -def test_upgrade_check_times_renames_data_check_paths_when_archives_paths_are_already_upgraded(): - base_path = '~/.borgmatic/checks/1234' - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'archives', 'all' - ).and_return(f'{base_path}/archives/all') - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'data', 
'all' - ).and_return(f'{base_path}/data/all') - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( - False - ) - flexmock(module.os.path).should_receive('isfile').with_args( - f'{base_path}/archives.temp' - ).and_return(False) - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( - True - ) - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/data', f'{base_path}/data.temp' - ).once() - flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/data').once() - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/data.temp', f'{base_path}/data/all' - ).once() - - module.upgrade_check_times(flexmock(), flexmock()) - - -def test_upgrade_check_times_skips_missing_check_paths(): - flexmock(module).should_receive('make_check_time_path').and_return( - '~/.borgmatic/checks/1234/archives/all' - ) - flexmock(module.os.path).should_receive('isfile').and_return(False) - flexmock(module.os).should_receive('rename').never() - flexmock(module.os).should_receive('mkdir').never() - - module.upgrade_check_times(flexmock(), flexmock()) - - -def test_upgrade_check_times_renames_stale_temporary_check_path(): - base_path = '~/.borgmatic/checks/1234' - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'archives', 'all' - ).and_return(f'{base_path}/archives/all') - flexmock(module).should_receive('make_check_time_path').with_args( - object, object, 'data', 'all' - ).and_return(f'{base_path}/data/all') - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/archives').and_return( - False - ) - flexmock(module.os.path).should_receive('isfile').with_args( - f'{base_path}/archives.temp' - ).and_return(True) - flexmock(module.os.path).should_receive('isfile').with_args(f'{base_path}/data').and_return( - False - ) - flexmock(module.os.path).should_receive('isfile').with_args( - f'{base_path}/data.temp' - ).and_return(False) - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/archives', f'{base_path}/archives.temp' - ).and_raise(FileNotFoundError) - flexmock(module.os).should_receive('mkdir').with_args(f'{base_path}/archives').once() - flexmock(module.os).should_receive('rename').with_args( - f'{base_path}/archives.temp', f'{base_path}/archives/all' - ).once() - - module.upgrade_check_times(flexmock(), flexmock()) - - -def test_check_archives_with_progress_passes_through_to_borg(): - checks = ('repository',) - config = {'check_last': None} +def test_get_repository_id_with_valid_json_does_not_raise(): + config = {} flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + + assert module.get_repository_id( + repository_path='repo', + config=config, + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + local_path='borg', + remote_path=None, + ) + + +def test_get_repository_id_with_json_error_raises(): + config = {} + flexmock(module.rinfo).should_receive('display_repository_info').and_return( + '{"unexpected": {"id": "repo"}}' + ) + + with pytest.raises(ValueError): + module.get_repository_id( + 
repository_path='repo', + config=config, + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + local_path='borg', + remote_path=None, + ) + + +def test_get_repository_id_with_missing_json_keys_raises(): + config = {} + flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON') + + with pytest.raises(ValueError): + module.get_repository_id( + repository_path='repo', + config=config, + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + local_path='borg', + remote_path=None, + ) + + +def test_check_archives_with_progress_passes_through_to_borg(): + config = {} flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module).should_receive('execute_command').never() flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) @@ -695,8 +322,6 @@ def test_check_archives_with_progress_passes_through_to_borg(): borg_local_path='borg', borg_exit_codes=None, ).once() - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -706,20 +331,13 @@ def test_check_archives_with_progress_passes_through_to_borg(): progress=True, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks={'repository'}, + archive_filter_flags=(), ) def test_check_archives_with_repair_passes_through_to_borg(): - checks = ('repository',) - config = {'check_last': None} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {} flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module).should_receive('execute_command').never() flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) @@ -731,8 +349,6 @@ def test_check_archives_with_repair_passes_through_to_borg(): borg_local_path='borg', borg_exit_codes=None, ).once() - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -742,6 +358,8 @@ def test_check_archives_with_repair_passes_through_to_borg(): progress=None, repair=True, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks={'repository'}, + archive_filter_flags=(), ) @@ -755,98 +373,10 @@ def test_check_archives_with_repair_passes_through_to_borg(): ), ) def test_check_archives_calls_borg_with_parameters(checks): - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {} flexmock(module).should_receive('make_check_flags').with_args(checks, 
()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') - - module.check_archives( - repository_path='repo', - config=config, - local_borg_version='1.2.3', - check_arguments=flexmock( - progress=None, repair=None, only_checks=None, force=None, match_archives=None - ), - global_arguments=flexmock(log_json=False), - ) - - -def test_check_archives_with_json_error_raises(): - checks = ('archives',) - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"unexpected": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) - - with pytest.raises(ValueError): - module.check_archives( - repository_path='repo', - config=config, - local_borg_version='1.2.3', - check_arguments=flexmock( - progress=None, repair=None, only_checks=None, force=None, match_archives=None - ), - global_arguments=flexmock(log_json=False), - ) - - -def test_check_archives_with_missing_json_keys_raises(): - checks = ('archives',) - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON') - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) - - with pytest.raises(ValueError): - module.check_archives( - repository_path='repo', - config=config, - local_borg_version='1.2.3', - check_arguments=flexmock( - progress=None, repair=None, only_checks=None, force=None, match_archives=None - ), - global_arguments=flexmock(log_json=False), - ) - - -def test_check_archives_with_extract_check_calls_extract_only(): - checks = ('extract',) - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) - flexmock(module).should_receive('make_check_flags').never() - flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) - flexmock(module.extract).should_receive('extract_last_archive_dry_run').once() - flexmock(module).should_receive('write_check_time') - insert_execute_command_never() module.check_archives( repository_path='repo', @@ -856,26 +386,17 @@ def test_check_archives_with_extract_check_calls_extract_only(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + 
archive_filter_flags=(), ) def test_check_archives_with_log_info_passes_through_to_borg(): - checks = ('repository',) - config = {'check_last': None} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {} flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_logging_mock(logging.INFO) insert_execute_command_mock(('borg', 'check', '--info', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -885,49 +406,17 @@ def test_check_archives_with_log_info_passes_through_to_borg(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks={'repository'}, + archive_filter_flags=(), ) def test_check_archives_with_log_debug_passes_through_to_borg(): - checks = ('repository',) - config = {'check_last': None} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {} flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_logging_mock(logging.DEBUG) insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') - - module.check_archives( - repository_path='repo', - config=config, - local_borg_version='1.2.3', - check_arguments=flexmock( - progress=None, repair=None, only_checks=None, force=None, match_archives=None - ), - global_arguments=flexmock(log_json=False), - ) - - -def test_check_archives_without_any_checks_bails(): - config = {'check_last': None} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(()) - insert_execute_command_never() module.check_archives( repository_path='repo', @@ -937,26 +426,17 @@ def test_check_archives_without_any_checks_bails(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks={'repository'}, + archive_filter_flags=(), ) def test_check_archives_with_local_path_calls_borg_via_local_path(): - checks = ('repository',) - check_last = flexmock() - config = 
{'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + checks = {'repository'} + config = {} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg1', 'check', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -966,28 +446,19 @@ def test_check_archives_with_local_path_calls_borg_via_local_path(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + archive_filter_flags=(), local_path='borg1', ) def test_check_archives_with_exit_codes_calls_borg_using_them(): - checks = ('repository',) - check_last = flexmock() + checks = {'repository'} borg_exit_codes = flexmock() - config = {'check_last': check_last, 'borg_exit_codes': borg_exit_codes} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {'borg_exit_codes': borg_exit_codes} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', 'repo'), borg_exit_codes=borg_exit_codes) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -997,26 +468,17 @@ def test_check_archives_with_exit_codes_calls_borg_using_them(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + archive_filter_flags=(), ) def test_check_archives_with_remote_path_passes_through_to_borg(): - checks = ('repository',) - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + checks = {'repository'} + config = {} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo')) - 
flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1026,27 +488,18 @@ def test_check_archives_with_remote_path_passes_through_to_borg(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + archive_filter_flags=(), remote_path='borg1', ) def test_check_archives_with_log_json_passes_through_to_borg(): - checks = ('repository',) - check_last = flexmock() - config = {'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + checks = {'repository'} + config = {} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--log-json', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1056,26 +509,17 @@ def test_check_archives_with_log_json_passes_through_to_borg(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=True), + checks=checks, + archive_filter_flags=(), ) def test_check_archives_with_lock_wait_passes_through_to_borg(): - checks = ('repository',) - check_last = flexmock() - config = {'lock_wait': 5, 'check_last': check_last} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + checks = {'repository'} + config = {'lock_wait': 5} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1085,27 +529,18 @@ def test_check_archives_with_lock_wait_passes_through_to_borg(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + archive_filter_flags=(), ) def test_check_archives_with_retention_prefix(): - checks = ('repository',) - check_last = flexmock() + checks = {'repository'} prefix = 'foo-' - config = {'check_last': check_last, 'prefix': prefix} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - 
flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {'prefix': prefix} flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1115,25 +550,16 @@ def test_check_archives_with_retention_prefix(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks=checks, + archive_filter_flags=(), ) def test_check_archives_with_extra_borg_options_passes_through_to_borg(): - checks = ('repository',) - config = {'check_last': None, 'extra_borg_options': {'check': '--extra --options'}} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return(()) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {'extra_borg_options': {'check': '--extra --options'}} flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo')) - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1143,22 +569,13 @@ def test_check_archives_with_extra_borg_options_passes_through_to_borg(): progress=None, repair=None, only_checks=None, force=None, match_archives=None ), global_arguments=flexmock(log_json=False), + checks={'repository'}, + archive_filter_flags=(), ) def test_check_archives_with_match_archives_passes_through_to_borg(): - checks = ('archives',) - config = {'check_last': None} - flexmock(module.rinfo).should_receive('display_repository_info').and_return( - '{"repository": {"id": "repo"}}' - ) - flexmock(module).should_receive('upgrade_check_times') - flexmock(module).should_receive('parse_checks') - flexmock(module).should_receive('make_archive_filter_flags').and_return( - ('--match-archives', 'foo-*') - ) - flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) + config = {} flexmock(module).should_receive('make_check_flags').and_return(('--match-archives', 'foo-*')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.environment).should_receive('make_environment') @@ -1168,8 +585,6 @@ def test_check_archives_with_match_archives_passes_through_to_borg(): borg_local_path='borg', borg_exit_codes=None, ).once() - flexmock(module).should_receive('make_check_time_path') - flexmock(module).should_receive('write_check_time') module.check_archives( repository_path='repo', @@ -1179,4 +594,6 @@ def test_check_archives_with_match_archives_passes_through_to_borg(): 
progress=None, repair=None, only_checks=None, force=None, match_archives='foo-*' ), global_arguments=flexmock(log_json=False), + checks={'archives'}, + archive_filter_flags=('--match-archives', 'foo-*'), ) diff --git a/tests/unit/config/test_generate.py b/tests/unit/config/test_generate.py index a86c5f5..1ba1df3 100644 --- a/tests/unit/config/test_generate.py +++ b/tests/unit/config/test_generate.py @@ -6,9 +6,48 @@ from flexmock import flexmock from borgmatic.config import generate as module +def test_get_properties_with_simple_object(): + schema = { + 'type': 'object', + 'properties': OrderedDict( + [ + ('field1', {'example': 'Example'}), + ] + ), + } + + assert module.get_properties(schema) == schema['properties'] + + +def test_get_properties_merges_one_of_list_properties(): + schema = { + 'type': 'object', + 'oneOf': [ + { + 'properties': OrderedDict( + [ + ('field1', {'example': 'Example 1'}), + ('field2', {'example': 'Example 2'}), + ] + ), + }, + { + 'properties': OrderedDict( + [ + ('field2', {'example': 'Example 2'}), + ('field3', {'example': 'Example 3'}), + ] + ), + }, + ], + } + + assert module.get_properties(schema) == dict( + schema['oneOf'][0]['properties'], **schema['oneOf'][1]['properties'] + ) + + def test_schema_to_sample_configuration_generates_config_map_with_examples(): - flexmock(module.ruamel.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) - flexmock(module).should_receive('add_comments_to_configuration_object') schema = { 'type': 'object', 'properties': OrderedDict( @@ -19,6 +58,9 @@ def test_schema_to_sample_configuration_generates_config_map_with_examples(): ] ), } + flexmock(module).should_receive('get_properties').and_return(schema['properties']) + flexmock(module.ruamel.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) + flexmock(module).should_receive('add_comments_to_configuration_object') config = module.schema_to_sample_configuration(schema) @@ -42,9 +84,6 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_strings_wit def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_examples(): - flexmock(module.ruamel.yaml.comments).should_receive('CommentedSeq').replace_with(list) - flexmock(module).should_receive('add_comments_to_configuration_sequence') - flexmock(module).should_receive('add_comments_to_configuration_object') schema = { 'type': 'array', 'items': { @@ -54,6 +93,10 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_e ), }, } + flexmock(module).should_receive('get_properties').and_return(schema['items']['properties']) + flexmock(module.ruamel.yaml.comments).should_receive('CommentedSeq').replace_with(list) + flexmock(module).should_receive('add_comments_to_configuration_sequence') + flexmock(module).should_receive('add_comments_to_configuration_object') config = module.schema_to_sample_configuration(schema) From 6680aece5ac542dda8bcd6773d347ac88f4219d4 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Thu, 4 Apr 2024 14:23:56 -0700 Subject: [PATCH 2/5] Split out (most of) command construction from create_archive() in preparation for reuse in spot check (#656). 
--- borgmatic/actions/check.py | 29 +- borgmatic/borg/check.py | 6 +- borgmatic/borg/create.py | 77 +- tests/unit/actions/test_check.py | 10 +- tests/unit/borg/test_check.py | 35 +- tests/unit/borg/test_create.py | 2634 ++++++++++-------------------- 6 files changed, 959 insertions(+), 1832 deletions(-) diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index 7598d6d..ca5ad3c 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -2,16 +2,15 @@ import datetime import hashlib import itertools import logging -import pathlib import os +import pathlib -import borgmatic.borg.extract import borgmatic.borg.check +import borgmatic.borg.extract import borgmatic.borg.state import borgmatic.config.validate import borgmatic.hooks.command - DEFAULT_CHECKS = ( {'name': 'repository', 'frequency': '1 month'}, {'name': 'archives', 'frequency': '1 month'}, @@ -176,7 +175,9 @@ def make_check_time_path(config, borg_repository_id, check_type, archives_check_ that check's time (the time of that check last occurring). ''' borgmatic_source_directory = os.path.expanduser( - config.get('borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY) + config.get( + 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY + ) ) if check_type in ('archives', 'data'): @@ -354,9 +355,7 @@ def run_check( remote_path=remote_path, ) for check in borg_specific_checks: - write_check_time( - make_check_time_path(config, repository_id, check, archives_check_id) - ) + write_check_time(make_check_time_path(config, repository_id, check, archives_check_id)) if 'extract' in checks: borgmatic.borg.extract.extract_last_archive_dry_run( @@ -370,14 +369,14 @@ def run_check( ) write_check_time(make_check_time_path(config, repository_id, 'extract')) - #if 'spot' in checks: - # TODO: - # count the number of files in source directories - # in a loop until the sample percentage (of the total source files) is met: - # pick a random file from source directories and calculate its sha256 sum - # extract the file from the latest archive (to stdout) and calculate its sha256 sum - # if the two checksums are equal, increment the matching files count - # if the percentage of matching files (of the total source files) < tolerance percentage, error + # if 'spot' in checks: + # TODO: + # count the number of files in source directories, but need to take patterns and stuff into account... 
+ # in a loop until the sample percentage (of the total source files) is met: + # pick a random file from source directories and calculate its sha256 sum + # extract the file from the latest archive (to stdout) and calculate its sha256 sum + # if the two checksums are equal, increment the matching files count + # if the percentage of matching files (of the total source files) < tolerance percentage, error borgmatic.hooks.command.execute_hook( config.get('after_check'), diff --git a/borgmatic/borg/check.py b/borgmatic/borg/check.py index 84808ab..9f3c8ce 100644 --- a/borgmatic/borg/check.py +++ b/borgmatic/borg/check.py @@ -1,12 +1,10 @@ import argparse import json import logging -import os from borgmatic.borg import environment, feature, flags, rinfo from borgmatic.execute import DO_NOT_CAPTURE, execute_command - logger = logging.getLogger(__name__) @@ -85,7 +83,9 @@ def make_check_flags(checks, archive_filter_flags): ) -def get_repository_id(repository_path, config, local_borg_version, global_arguments, local_path, remote_path): +def get_repository_id( + repository_path, config, local_borg_version, global_arguments, local_path, remote_path +): ''' Given a local or remote repository path, a configuration dict, the local Borg version, global arguments, and local/remote commands to run, return the corresponding Borg repository ID. diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index 8a899e9..59fa1a2 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -320,35 +320,31 @@ def check_all_source_directories_exist(source_directories): raise ValueError(f"Source directories do not exist: {', '.join(missing_directories)}") -def create_archive( +def make_base_create_command( dry_run, repository_path, config, config_paths, local_borg_version, global_arguments, + borgmatic_source_directories, local_path='borg', remote_path=None, progress=False, - stats=False, json=False, list_files=False, stream_processes=None, ): ''' Given vebosity/dry-run flags, a local or remote repository path, a configuration dict, a - sequence of loaded configuration paths, the local Borg version, and global arguments as an - argparse.Namespace instance, create a Borg archive and return Borg's JSON output (if any). - - If a sequence of stream processes is given (instances of subprocess.Popen), then execute the - create command while also triggering the given processes to produce output. + sequence of loaded configuration paths, the local Borg version, global arguments as an + argparse.Namespace instance, and a sequence of borgmatic source directories, return a tuple of + (base Borg create command flags, Borg create command positional arguments, open pattern file + handle, open exclude file handle). 
''' - borgmatic.logger.add_custom_log_levels() - borgmatic_source_directories = expand_directories( - collect_borgmatic_source_directories(config.get('borgmatic_source_directory')) - ) if config.get('source_directories_must_exist', False): check_all_source_directories_exist(config.get('source_directories')) + sources = deduplicate_directories( map_directories_to_devices( expand_directories( @@ -364,11 +360,6 @@ def create_archive( ensure_files_readable(config.get('patterns_from'), config.get('exclude_from')) - try: - working_directory = os.path.expanduser(config.get('working_directory')) - except TypeError: - working_directory = None - pattern_file = ( write_pattern_file(config.get('patterns'), sources) if config.get('patterns') or config.get('patterns_from') @@ -451,6 +442,55 @@ def create_archive( repository_path, archive_name_format, local_borg_version ) + (sources if not pattern_file else ()) + return (create_flags, create_positional_arguments, pattern_file, exclude_file) + + +def create_archive( + dry_run, + repository_path, + config, + config_paths, + local_borg_version, + global_arguments, + local_path='borg', + remote_path=None, + progress=False, + stats=False, + json=False, + list_files=False, + stream_processes=None, +): + ''' + Given vebosity/dry-run flags, a local or remote repository path, a configuration dict, a + sequence of loaded configuration paths, the local Borg version, and global arguments as an + argparse.Namespace instance, create a Borg archive and return Borg's JSON output (if any). + + If a sequence of stream processes is given (instances of subprocess.Popen), then execute the + create command while also triggering the given processes to produce output. + ''' + borgmatic.logger.add_custom_log_levels() + borgmatic_source_directories = expand_directories( + collect_borgmatic_source_directories(config.get('borgmatic_source_directory')) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + make_base_create_command( + dry_run, + repository_path, + config, + config_paths, + local_borg_version, + global_arguments, + borgmatic_source_directories, + local_path, + remote_path, + progress, + json, + list_files, + stream_processes, + ) + ) + if json: output_log_level = None elif list_files or (stats and not dry_run): @@ -462,6 +502,11 @@ def create_archive( # the terminal directly. 
output_file = DO_NOT_CAPTURE if progress else None + try: + working_directory = os.path.expanduser(config.get('working_directory')) + except TypeError: + working_directory = None + borg_environment = environment.make_environment(config) # If database hooks are enabled (as indicated by streaming processes), exclude files that might diff --git a/tests/unit/actions/test_check.py b/tests/unit/actions/test_check.py index 83c840f..dfca886 100644 --- a/tests/unit/actions/test_check.py +++ b/tests/unit/actions/test_check.py @@ -1,5 +1,5 @@ -from flexmock import flexmock import pytest +from flexmock import flexmock from borgmatic.actions import check as module @@ -417,7 +417,9 @@ def test_run_check_checks_archives_for_configured_repository(): flexmock(module).should_receive('parse_checks') flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return({'repository', 'archives'}) + flexmock(module).should_receive('filter_checks_on_frequency').and_return( + {'repository', 'archives'} + ) flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') @@ -527,7 +529,9 @@ def test_run_check_checks_archives_in_selected_repository(): flexmock(module).should_receive('parse_checks') flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) flexmock(module).should_receive('make_archives_check_id').and_return(None) - flexmock(module).should_receive('filter_checks_on_frequency').and_return({'repository', 'archives'}) + flexmock(module).should_receive('filter_checks_on_frequency').and_return( + {'repository', 'archives'} + ) flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') diff --git a/tests/unit/borg/test_check.py b/tests/unit/borg/test_check.py index 280dc93..8549e43 100644 --- a/tests/unit/borg/test_check.py +++ b/tests/unit/borg/test_check.py @@ -69,7 +69,10 @@ def test_make_archive_filter_flags_with_archives_check_and_last_includes_last_fl flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'check_last': 3}, ('archives',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'check_last': 3}, + ('archives',), + check_arguments=flexmock(match_archives=None), ) assert flags == ('--last', '3') @@ -80,7 +83,10 @@ def test_make_archive_filter_flags_with_data_check_and_last_includes_last_flag() flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'check_last': 3}, ('data',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'check_last': 3}, + ('data',), + check_arguments=flexmock(match_archives=None), ) assert flags == ('--last', '3') @@ -91,7 +97,10 @@ def test_make_archive_filter_flags_with_repository_check_and_last_omits_last_fla flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'check_last': 3}, ('repository',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'check_last': 3}, + ('repository',), + check_arguments=flexmock(match_archives=None), ) assert 
flags == () @@ -116,7 +125,10 @@ def test_make_archive_filter_flags_with_archives_check_and_prefix_includes_match flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'prefix': 'foo-'}, ('archives',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'prefix': 'foo-'}, + ('archives',), + check_arguments=flexmock(match_archives=None), ) assert flags == ('--match-archives', 'sh:foo-*') @@ -127,7 +139,10 @@ def test_make_archive_filter_flags_with_data_check_and_prefix_includes_match_arc flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'prefix': 'foo-'}, ('data',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'prefix': 'foo-'}, + ('data',), + check_arguments=flexmock(match_archives=None), ) assert flags == ('--match-archives', 'sh:foo-*') @@ -170,7 +185,10 @@ def test_make_archive_filter_flags_with_archives_check_and_none_prefix_omits_mat flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {}, ('archives',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {}, + ('archives',), + check_arguments=flexmock(match_archives=None), ) assert flags == () @@ -181,7 +199,10 @@ def test_make_archive_filter_flags_with_repository_check_and_prefix_omits_match_ flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flags = module.make_archive_filter_flags( - '1.2.3', {'prefix': 'foo-'}, ('repository',), check_arguments=flexmock(match_archives=None), + '1.2.3', + {'prefix': 'foo-'}, + ('repository',), + check_arguments=flexmock(match_archives=None), ) assert flags == () diff --git a/tests/unit/borg/test_create.py b/tests/unit/borg/test_create.py index bf91bc3..dd27961 100644 --- a/tests/unit/borg/test_create.py +++ b/tests/unit/borg/test_create.py @@ -474,10 +474,287 @@ DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' # noqa: FS003 REPO_ARCHIVE_WITH_PATHS = (f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'bar') -def test_create_archive_calls_borg_with_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) +def test_make_base_create_produces_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, 
pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_patterns_file_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + mock_pattern_file = flexmock(name='/tmp/patterns') + flexmock(module).should_receive('write_pattern_file').and_return(mock_pattern_file).and_return( + None + ) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + pattern_flags = ('--patterns-from', mock_pattern_file.name) + flexmock(module).should_receive('make_pattern_flags').and_return(pattern_flags) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'patterns': ['pattern'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + pattern_flags + assert create_positional_arguments == (f'repo::{DEFAULT_ARCHIVE_NAME}',) + assert pattern_file == mock_pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_sources_and_config_paths_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return( + ('foo', 'bar', '/tmp/test.yaml') + ) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) + flexmock(module).should_receive('expand_directories').with_args( + ('foo', 'bar', '/tmp/test.yaml') + ).and_return(('foo', 'bar', '/tmp/test.yaml')) + flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + 
flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + ('/tmp/test.yaml',) + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_with_store_config_false_omits_config_files(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) + flexmock(module).should_receive('expand_directories').with_args(('foo', 'bar')).and_return( + ('foo', 'bar') + ) + flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'store_config_files': False, + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_exclude_patterns_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(('exclude',)) + mock_exclude_file = flexmock(name='/tmp/excludes') + flexmock(module).should_receive('write_pattern_file').and_return(mock_exclude_file) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + 
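+    # The exclude file handle returned by the mocked write_pattern_file should come back from
+    # make_base_create_command alongside the --exclude-from flags stubbed just below.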
flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + exclude_flags = ('--exclude-from', 'excludes') + flexmock(module).should_receive('make_exclude_flags').and_return(exclude_flags) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'exclude_patterns': ['exclude'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + exclude_flags + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert exclude_file == mock_exclude_file + + +@pytest.mark.parametrize( + 'option_name,option_value,feature_available,option_flags', + ( + ('checkpoint_interval', 600, True, ('--checkpoint-interval', '600')), + ('checkpoint_volume', 1024, True, ('--checkpoint-volume', '1024')), + ('chunker_params', '1,2,3,4', True, ('--chunker-params', '1,2,3,4')), + ('compression', 'rle', True, ('--compression', 'rle')), + ('one_file_system', True, True, ('--one-file-system',)), + ('upload_rate_limit', 100, True, ('--upload-ratelimit', '100')), + ('upload_rate_limit', 100, False, ('--remote-ratelimit', '100')), + ('numeric_ids', True, True, ('--numeric-ids',)), + ('numeric_ids', True, False, ('--numeric-owner',)), + ('read_special', True, True, ('--read-special',)), + ('ctime', True, True, ()), + ('ctime', False, True, ('--noctime',)), + ('birthtime', True, True, ()), + ('birthtime', False, True, ('--nobirthtime',)), + ('atime', True, True, ('--atime',)), + ('atime', True, False, ()), + ('atime', False, True, ()), + ('atime', False, False, ('--noatime',)), + ('flags', True, True, ()), + ('flags', True, False, ()), + ('flags', False, True, ('--noflags',)), + ('flags', False, False, ('--nobsdflags',)), + ('files_cache', 'ctime,size', True, ('--files-cache', 'ctime,size')), + ('umask', 740, True, ('--umask', '740')), + ('lock_wait', 5, True, ('--lock-wait', '5')), + ), +) +def test_make_base_create_command_includes_configuration_option_as_command_flag( + option_name, option_value, feature_available, option_flags +): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(feature_available) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( 
+ (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + option_name: option_value, + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + option_flags + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_dry_run_in_borg_command(): flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) @@ -493,6 +770,489 @@ def test_create_archive_calls_borg_with_parameters(): flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=True, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'exclude_patterns': ['exclude'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create', '--dry-run') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_local_path_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + local_path='borg1', + ) + ) + + assert create_flags == ('borg1', 'create') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_remote_path_in_borg_command(): + 
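+    # As in the other make_base_create_command tests, the directory and flag helpers are stubbed
+    # out so only the --remote-path handling asserted below is actually exercised.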
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + remote_path='borg1', + ) + ) + + assert create_flags == ('borg', 'create', '--remote-path', 'borg1') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_log_json_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=True), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create', '--log-json') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_list_flags_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + 
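+    # Only global_arguments.log_json matters for this test; it should surface as --log-json in the
+    # asserted create flags.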
flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + list_files=True, + ) + ) + + assert create_flags == ('borg', 'create', '--list', '--filter', 'FOO') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_with_stream_processes_ignores_read_special_false_and_logs_warning(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module.logger).should_receive('warning').once() + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'read_special': False, + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + stream_processes=flexmock(), + ) + ) + + assert create_flags == ('borg', 'create', '--one-file-system', '--read-special') + assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_with_non_matching_source_directories_glob_passes_through(): + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + 
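+    # deduplicate_directories is stubbed to hand back the unexpanded 'foo*' glob, so the generated
+    # command should pass it through to Borg verbatim.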
flexmock(module).should_receive('deduplicate_directories').and_return(('foo*',)) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo*'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == (f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo*') + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_expands_glob_in_source_directories(): + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'food')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo*'], + 'repositories': ['repo'], + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == (f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food') + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_archive_name_format_in_borg_command(): + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + 
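+    # make_repository_archive_flags is stubbed to return the already-formatted 'repo::ARCHIVE_NAME',
+    # standing in for the configured archive_name_format.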
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + ('repo::ARCHIVE_NAME',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'archive_name_format': 'ARCHIVE_NAME', + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == ('repo::ARCHIVE_NAME', 'foo', 'bar') + assert not pattern_file + assert not exclude_file + + +def test_base_create_command_includes_archive_name_format_with_placeholders_in_borg_command(): + repository_archive_pattern = 'repo::Documents_{hostname}-{now}' # noqa: FS003 + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (repository_archive_pattern,) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003 + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == (repository_archive_pattern, 'foo', 'bar') + assert not pattern_file + assert not exclude_file + + +def 
test_base_create_command_includes_repository_and_archive_name_format_with_placeholders_in_borg_command(): + repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}' # noqa: FS003 + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (repository_archive_pattern,) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='{fqdn}', # noqa: FS003 + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['{fqdn}'], # noqa: FS003 + 'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003 + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create') + assert create_positional_arguments == (repository_archive_pattern, 'foo', 'bar') + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_includes_extra_borg_options_in_borg_command(): + flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) + flexmock(module).should_receive('map_directories_to_devices').and_return({}) + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('pattern_root_directories').and_return([]) + flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(None) + flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') + flexmock(module.feature).should_receive('available').and_return(True) + flexmock(module).should_receive('ensure_files_readable') + flexmock(module).should_receive('make_pattern_flags').and_return(()) + flexmock(module).should_receive('make_exclude_flags').and_return(()) + flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( + (f'repo::{DEFAULT_ARCHIVE_NAME}',) + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'extra_borg_options': {'create': '--extra --options'}, + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + ) + + assert create_flags == ('borg', 'create', '--extra', '--options') + assert create_positional_arguments == 
REPO_ARCHIVE_WITH_PATHS + assert not pattern_file + assert not exclude_file + + +def test_make_base_create_command_with_non_existent_directory_and_source_directories_must_exist_raises(): + flexmock(module).should_receive('check_all_source_directories_exist').and_raise(ValueError) + + with pytest.raises(ValueError): + module.make_base_create_command( + dry_run=False, + repository_path='repo', + config={ + 'source_directories': ['foo', 'bar'], + 'repositories': ['repo'], + 'source_directories_must_exist': True, + }, + config_paths=['/tmp/test.yaml'], + local_borg_version='1.2.3', + global_arguments=flexmock(log_json=False), + borgmatic_source_directories=(), + ) + + +def test_create_archive_calls_borg_with_parameters(): + flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') + flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER + flexmock(module).should_receive('expand_directories').and_return(()) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) + ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS, @@ -521,21 +1281,10 @@ def test_create_archive_calls_borg_with_parameters(): def test_create_archive_calls_borg_with_environment(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) environment = {'BORG_THINGY': 'YUP'} flexmock(module.environment).should_receive('make_environment').and_return(environment) @@ -563,217 +1312,13 @@ def test_create_archive_calls_borg_with_environment(): ) -def test_create_archive_with_patterns_calls_borg_with_patterns_including_converted_source_directories(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - pattern_flags = ('--patterns-from', 'patterns') - 
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return( - flexmock(name='/tmp/patterns') - ).and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(pattern_flags) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + pattern_flags + (f'repo::{DEFAULT_ARCHIVE_NAME}',), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'patterns': ['pattern'], - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_sources_and_config_paths_calls_borg_with_sources_and_config_paths(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return( - ('foo', 'bar', '/tmp/test.yaml') - ) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) - flexmock(module).should_receive('expand_directories').with_args( - ('foo', 'bar', '/tmp/test.yaml') - ).and_return(('foo', 'bar', '/tmp/test.yaml')) - flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - environment = {'BORG_THINGY': 'YUP'} - 
flexmock(module.environment).should_receive('make_environment').and_return(environment) - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('/tmp/test.yaml',), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=environment, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_sources_and_config_paths_with_store_config_files_false_calls_borg_with_sources_and_no_config_paths(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) - flexmock(module).should_receive('expand_directories').with_args(('foo', 'bar')).and_return( - ('foo', 'bar') - ) - flexmock(module).should_receive('expand_directories').with_args([]).and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - environment = {'BORG_THINGY': 'YUP'} - flexmock(module.environment).should_receive('make_environment').and_return(environment) - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=environment, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'store_config_files': False, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_exclude_patterns_calls_borg_with_excludes(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - exclude_flags = ('--exclude-from', 'excludes') - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - 
flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(('exclude',)) - flexmock(module).should_receive('write_pattern_file').and_return(None).and_return( - flexmock(name='/tmp/excludes') - ) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(exclude_flags) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + exclude_flags + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': ['exclude'], - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - def test_create_archive_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -804,21 +1349,10 @@ def test_create_archive_with_log_info_calls_borg_with_info_parameter(): def test_create_archive_with_log_info_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = 
module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( @@ -848,21 +1382,10 @@ def test_create_archive_with_log_info_and_json_suppresses_most_borg_output(): def test_create_archive_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -893,21 +1416,10 @@ def test_create_archive_with_log_debug_calls_borg_with_debug_parameter(): def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') 
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( @@ -934,70 +1446,15 @@ def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output(): ) -def test_create_archive_with_dry_run_calls_borg_with_dry_run_parameter(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--dry-run') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=True, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - 
local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats_parameter(): +def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats(): # --dry-run and --stats are mutually exclusive, see: # https://borgbackup.readthedocs.io/en/stable/usage/create.html#description flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create', '--dry-run'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -1026,257 +1483,13 @@ def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats_paramete ) -def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_interval_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - 
flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--checkpoint-interval', '600') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'checkpoint_interval': 600, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_checkpoint_volume_calls_borg_with_checkpoint_volume_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--checkpoint-volume', '1024') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'checkpoint_volume': 1024, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - 
flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--chunker-params', '1,2,3,4') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'chunker_params': '1,2,3,4', - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_compression_calls_borg_with_compression_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--compression', 'rle') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'compression': 'rle', - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -@pytest.mark.parametrize( - 'feature_available,option_flag', - ((True, '--upload-ratelimit'), (False, '--remote-ratelimit')), -) -def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_parameters( - feature_available, option_flag -): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - 
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(feature_available) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', option_flag, '100') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'upload_rate_limit': 100, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - def test_create_archive_with_working_directory_calls_borg_with_working_directory(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').with_args('/working/dir').and_return( - '/working/dir' - ) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') 
flexmock(module).should_receive('execute_command').with_args( @@ -1304,435 +1517,13 @@ def test_create_archive_with_working_directory_calls_borg_with_working_directory ) -def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_parameter(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--one-file-system') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'one_file_system': True, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -@pytest.mark.parametrize( - 'feature_available,option_flag', - ((True, '--numeric-ids'), (False, '--numeric-owner')), -) -def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter( - feature_available, option_flag -): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(feature_available) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - 
flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', option_flag) + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'numeric_ids': True, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_read_special_calls_borg_with_read_special_parameter(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('collect_special_file_paths').and_return(()) - create_command = ('borg', 'create', '--read-special') + REPO_ARCHIVE_WITH_PATHS - flexmock(module).should_receive('execute_command').with_args( - create_command + ('--dry-run', '--list'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - flexmock(module).should_receive('execute_command').with_args( - create_command, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'read_special': True, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -@pytest.mark.parametrize( - 'option_name,option_value', - ( - ('ctime', True), - ('ctime', False), - ('birthtime', True), - ('birthtime', False), - ), -) -def test_create_archive_with_basic_option_calls_borg_with_corresponding_parameter( - option_name, option_value -): - 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - option_flag = '--no' + option_name.replace('', '') if option_value is False else None - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - option_name: option_value, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -@pytest.mark.parametrize( - 'option_value,feature_available,option_flag', - ( - (True, True, '--atime'), - (True, False, None), - (False, True, None), - (False, False, '--noatime'), - ), -) -def test_create_archive_with_atime_option_calls_borg_with_corresponding_parameter( - option_value, feature_available, option_flag -): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(feature_available) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - 
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'atime': option_value, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -@pytest.mark.parametrize( - 'option_value,feature_available,option_flag', - ( - (True, True, None), - (True, False, None), - (False, True, '--noflags'), - (False, False, '--nobsdflags'), - ), -) -def test_create_archive_with_flags_option_calls_borg_with_corresponding_parameter( - option_value, feature_available, option_flag -): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(feature_available) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'flags': option_value, - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_files_cache_calls_borg_with_files_cache_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - 
flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--files-cache', 'ctime,size') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'files_cache': 'ctime,size', - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_local_path_calls_borg_via_local_path(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg1', 'create') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg1', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - 
global_arguments=flexmock(log_json=False), - local_path='borg1', - ) - - def test_create_archive_with_exit_codes_calls_borg_using_them(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') borg_exit_codes = flexmock() @@ -1761,203 +1552,13 @@ def test_create_archive_with_exit_codes_calls_borg_using_them(): ) -def test_create_archive_with_remote_path_calls_borg_with_remote_path_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--remote-path', 'borg1') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - 
repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - remote_path='borg1', - ) - - -def test_create_archive_with_umask_calls_borg_with_umask_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--umask', '740') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'umask': 740, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_log_json_calls_borg_with_log_json_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - 
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--log-json') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=True), - ) - - -def test_create_archive_with_lock_wait_calls_borg_with_lock_wait_parameters(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--lock-wait', '5') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'lock_wait': 5, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - def test_create_archive_with_stats_calls_borg_with_stats_parameter_and_answer_output_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - 
flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -1985,24 +1586,18 @@ def test_create_archive_with_stats_calls_borg_with_stats_parameter_and_answer_ou ) -def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_output_log_level(): +def test_create_archive_with_files_calls_borg_with_answer_output_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + ( + ('borg', 'create', '--list', '--filter', 'FOO'), + REPO_ARCHIVE_WITH_PATHS, + flexmock(), + flexmock(), + ) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -2033,21 +1628,10 @@ def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_out def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_parameter_and_no_list(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) 
flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -2079,21 +1663,10 @@ def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_para def test_create_archive_with_progress_calls_borg_with_progress_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( @@ -2125,21 +1698,15 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) 
flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + ( + ('borg', 'create', '--one-file-system', '--read-special'), + REPO_ARCHIVE_WITH_PATHS, + flexmock(), + flexmock(), + ) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(()) @@ -2187,29 +1754,25 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr ) -def test_create_archive_with_stream_processes_ignores_read_special_false_and_logs_warnings(): +def test_create_archive_with_stream_processes_ands_read_special_false_excludes_special_files(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(flexmock(name='/tmp/excludes')) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module.logger).should_receive('warning').twice() - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + ( + ('borg', 'create', '--one-file-system', '--read-special'), + REPO_ARCHIVE_WITH_PATHS, + flexmock(), + flexmock(), + ) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(('/dev/null',)) + flexmock(module).should_receive('expand_home_directories').and_return(()) + flexmock(module).should_receive('write_pattern_file').and_return(flexmock(name='patterns')) + 
flexmock(module).should_receive('make_exclude_flags').and_return(()) create_command = ( 'borg', 'create', @@ -2253,100 +1816,22 @@ def test_create_archive_with_stream_processes_ignores_read_special_false_and_log ) -def test_create_archive_with_stream_processes_adds_special_files_to_excludes(): +def test_create_archive_with_stream_processes_and_read_special_true_skips_special_files_excludes(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()).and_return( - ('special',) - ) - flexmock(module).should_receive('write_pattern_file').and_return(None).and_return( - flexmock(name='/excludes') - ) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()).and_return( - '--exclude-from', '/excludes' - ) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + ( + ('borg', 'create', '--one-file-system', '--read-special'), + REPO_ARCHIVE_WITH_PATHS, + flexmock(), + flexmock(), + ) ) flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('collect_special_file_paths').and_return(('special',)) - create_flags = ( - 'borg', - 'create', - '--one-file-system', - '--read-special', - ) - flexmock(module).should_receive('execute_command_with_processes').with_args( - create_flags + ('--dry-run', '--list') + REPO_ARCHIVE_WITH_PATHS, - processes=processes, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - flexmock(module).should_receive('execute_command_with_processes').with_args( - create_flags + ('--exclude-from', '/excludes') + REPO_ARCHIVE_WITH_PATHS, - processes=processes, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - stream_processes=processes, - ) - - -def test_create_archive_with_stream_processes_and_read_special_does_not_add_special_files_to_excludes(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = 
module.borgmatic.logger.ANSWER - processes = flexmock() - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()).and_return( - ('special',) - ) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('collect_special_file_paths').and_return(('special',)) + flexmock(module).should_receive('collect_special_file_paths').never() create_command = ( 'borg', 'create', @@ -2390,24 +1875,13 @@ def test_create_archive_with_stream_processes_and_read_special_does_not_add_spec ) -def test_create_archive_with_json_calls_borg_with_json_parameter(): +def test_create_archive_with_json_calls_borg_with_json_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( @@ -2435,24 +1909,13 @@ def test_create_archive_with_json_calls_borg_with_json_parameter(): assert json_output == '[]' -def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter(): +def 
test_create_archive_with_stats_and_json_calls_borg_without_stats_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) + flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) + flexmock(module).should_receive('make_base_create_command').and_return( + (('borg', 'create'), REPO_ARCHIVE_WITH_PATHS, flexmock(), flexmock()) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( @@ -2467,7 +1930,7 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter() dry_run=False, repository_path='repo', config={ - 'source_directories': ['foo', 'bar'], + 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, @@ -2481,411 +1944,6 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter() assert json_output == '[]' -def test_create_archive_with_source_directories_glob_expands(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'food')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - 
('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - flexmock(module.glob).should_receive('glob').with_args('foo*').and_return(['foo', 'food']) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo*'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_non_matching_source_directories_glob_passes_through(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo*',)) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo*'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - flexmock(module.glob).should_receive('glob').with_args('foo*').and_return([]) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo*'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_glob_calls_borg_with_expanded_directories(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'food')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - 
flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo*'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_archive_name_format_calls_borg_with_archive_name(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - ('repo::ARCHIVE_NAME',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', 'repo::ARCHIVE_NAME', 'foo', 'bar'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'archive_name_format': 'ARCHIVE_NAME', - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - repository_archive_pattern = 'repo::Documents_{hostname}-{now}' # noqa: FS003 - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - 
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (repository_archive_pattern,) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', repository_archive_pattern, 'foo', 'bar'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003 - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_repository_accepts_borg_placeholders(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}' # noqa: FS003 - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (repository_archive_pattern,) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', repository_archive_pattern, 'foo', 'bar'), - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='{fqdn}', 
# noqa: FS003 - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['{fqdn}'], # noqa: FS003 - 'exclude_patterns': None, - 'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003 - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('execute_command').with_args( - ('borg', 'create', '--extra', '--options') + REPO_ARCHIVE_WITH_PATHS, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'extra_borg_options': {'create': '--extra --options'}, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - -def test_create_archive_with_stream_processes_calls_borg_with_processes_and_read_special(): - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - processes = flexmock() - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) - flexmock(module).should_receive('map_directories_to_devices').and_return({}) - flexmock(module).should_receive('expand_directories').and_return(()) - flexmock(module).should_receive('pattern_root_directories').and_return([]) - flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) - flexmock(module).should_receive('expand_home_directories').and_return(()) - flexmock(module).should_receive('write_pattern_file').and_return(None) - flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') - flexmock(module.feature).should_receive('available').and_return(True) - flexmock(module).should_receive('ensure_files_readable') - flexmock(module).should_receive('make_pattern_flags').and_return(()) - 
flexmock(module).should_receive('make_exclude_flags').and_return(()) - flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( - (f'repo::{DEFAULT_ARCHIVE_NAME}',) - ) - flexmock(module.environment).should_receive('make_environment') - flexmock(module).should_receive('collect_special_file_paths').and_return(()) - create_command = ( - 'borg', - 'create', - '--one-file-system', - '--read-special', - ) + REPO_ARCHIVE_WITH_PATHS - flexmock(module).should_receive('execute_command_with_processes').with_args( - create_command + ('--dry-run', 'list'), - processes=processes, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - flexmock(module).should_receive('execute_command_with_processes').with_args( - create_command, - processes=processes, - output_log_level=logging.INFO, - output_file=None, - borg_local_path='borg', - borg_exit_codes=None, - working_directory=None, - extra_environment=None, - ) - - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - stream_processes=processes, - ) - - -def test_create_archive_with_non_existent_directory_and_source_directories_must_exist_raises_error(): - ''' - If a source directory doesn't exist and source_directories_must_exist is True, raise an error. - ''' - flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') - flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER - flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) - flexmock(module).should_receive('check_all_source_directories_exist').and_raise(ValueError) - - with pytest.raises(ValueError): - module.create_archive( - dry_run=False, - repository_path='repo', - config={ - 'source_directories': ['foo', 'bar'], - 'repositories': ['repo'], - 'exclude_patterns': None, - 'source_directories_must_exist': True, - }, - config_paths=['/tmp/test.yaml'], - local_borg_version='1.2.3', - global_arguments=flexmock(log_json=False), - ) - - def test_check_all_source_directories_exist_with_glob_and_tilde_directories(): flexmock(module).should_receive('expand_directory').with_args('foo*').and_return( ('foo', 'food') From 4c2eb2bfe385346952fc20d292d7cbb02af9ef36 Mon Sep 17 00:00:00 2001 From: Dan Helfman Date: Mon, 15 Apr 2024 11:02:05 -0700 Subject: [PATCH 3/5] Spot check basically complete other than docs (#656). 
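For reference, here is one way the new "spot" check might be configured. The option names come from the schema changes below; the values are simply the schema's example values, not defaults:

    checks:
        - name: spot
          frequency: 2 weeks
          count_tolerance_percentage: 10
          data_sample_percentage: 1
          data_tolerance_percentage: 0.5

With settings along these lines, the check fails if the source directories' file count differs from the latest archive's by more than 10%, or if a random 1% sample of source files turns up non-matching contents amounting to more than 0.5% of all source paths.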
--- NEWS | 5 + borgmatic/actions/check.py | 294 ++++++++++++++++- borgmatic/actions/json.py | 3 +- borgmatic/borg/create.py | 75 +++-- borgmatic/borg/list.py | 9 +- borgmatic/commands/arguments.py | 4 +- borgmatic/config/schema.yaml | 45 ++- borgmatic/execute.py | 12 +- borgmatic/hooks/mariadb.py | 8 + borgmatic/hooks/mongodb.py | 8 + borgmatic/hooks/mysql.py | 8 + borgmatic/hooks/postgresql.py | 8 + borgmatic/hooks/sqlite.py | 16 +- tests/unit/actions/test_check.py | 474 ++++++++++++++++++++++++++++ tests/unit/actions/test_json.py | 1 - tests/unit/borg/test_create.py | 173 +++------- tests/unit/hooks/test_mariadb.py | 10 + tests/unit/hooks/test_mongodb.py | 20 ++ tests/unit/hooks/test_mysql.py | 10 + tests/unit/hooks/test_postgresql.py | 20 ++ tests/unit/hooks/test_sqlite.py | 10 + tests/unit/test_execute.py | 7 + 22 files changed, 1027 insertions(+), 193 deletions(-) diff --git a/NEWS b/NEWS index 7510370..22d0952 100644 --- a/NEWS +++ b/NEWS @@ -1,4 +1,9 @@ 1.8.10.dev0 + * #656: Add a "spot" consistency check that compares file counts and contents between your source + files and the latest archive, ensuring they fall within configured tolerances. This can catch + problems like incorrect excludes, inadvertent deletes, files changed by malware, etc. See the + documentation for more information: + https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#spot-check * #842: When a command hook exits with a soft failure, ping the log and finish states for any configured monitoring hooks. * #843: Add documentation link to Loki dashboard for borgmatic: diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py index ca5ad3c..2c21570 100644 --- a/borgmatic/actions/check.py +++ b/borgmatic/actions/check.py @@ -4,11 +4,17 @@ import itertools import logging import os import pathlib +import random import borgmatic.borg.check +import borgmatic.borg.create +import borgmatic.borg.environment import borgmatic.borg.extract +import borgmatic.borg.list +import borgmatic.borg.rlist import borgmatic.borg.state import borgmatic.config.validate +import borgmatic.execute import borgmatic.hooks.command DEFAULT_CHECKS = ( @@ -288,6 +294,276 @@ def upgrade_check_times(config, borg_repository_id): os.rename(temporary_path, new_path) +def collect_spot_check_source_paths( + repository, config, local_borg_version, global_arguments, local_path, remote_path +): + ''' + Given a repository configuration dict, a configuration dict, the local Borg version, global + arguments as an argparse.Namespace instance, the local Borg path, and the remote Borg path, + collect the source paths that Borg would use in an actual create (but only include files and + symlinks). 
+ ''' + stream_processes = any( + borgmatic.hooks.dispatch.call_hooks( + 'use_streaming', + config, + repository['path'], + borgmatic.hooks.dump.DATA_SOURCE_HOOK_NAMES, + ).values() + ) + + (create_flags, create_positional_arguments, pattern_file, exclude_file) = ( + borgmatic.borg.create.make_base_create_command( + dry_run=True, + repository_path=repository['path'], + config=config, + config_paths=(), + local_borg_version=local_borg_version, + global_arguments=global_arguments, + borgmatic_source_directories=(), + local_path=local_path, + remote_path=remote_path, + list_files=True, + stream_processes=stream_processes, + ) + ) + borg_environment = borgmatic.borg.environment.make_environment(config) + + try: + working_directory = os.path.expanduser(config.get('working_directory')) + except TypeError: + working_directory = None + + paths_output = borgmatic.execute.execute_command_and_capture_output( + create_flags + create_positional_arguments, + capture_stderr=True, + working_directory=working_directory, + extra_environment=borg_environment, + borg_local_path=local_path, + borg_exit_codes=config.get('borg_exit_codes'), + ) + + paths = tuple( + path_line.split(' ', 1)[1] + for path_line in paths_output.split('\n') + if path_line and path_line.startswith('- ') or path_line.startswith('+ ') + ) + + return tuple(path for path in paths if os.path.isfile(path) or os.path.islink(path)) + + +BORG_DIRECTORY_FILE_TYPE = 'd' + + +def collect_spot_check_archive_paths( + repository, archive, config, local_borg_version, global_arguments, local_path, remote_path +): + ''' + Given a repository configuration dict, the name of the latest archive, a configuration dict, the + local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, and + the remote Borg path, collect the paths from the given archive (but only include files and + symlinks). + ''' + borgmatic_source_directory = os.path.expanduser( + config.get( + 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY + ) + ) + + return tuple( + path + for line in borgmatic.borg.list.capture_archive_listing( + repository['path'], + archive, + config, + local_borg_version, + global_arguments, + path_format='{type} /{path}{NL}', # noqa: FS003 + local_path=local_path, + remote_path=remote_path, + ) + for (file_type, path) in (line.split(' ', 1),) + if file_type != BORG_DIRECTORY_FILE_TYPE + if pathlib.Path(borgmatic_source_directory) not in pathlib.Path(path).parents + ) + + +def compare_spot_check_hashes( + repository, + archive, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + log_label, + source_paths, +): + ''' + Given a repository configuration dict, the name of the latest archive, a configuration dict, the + local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the + remote Borg path, a log label, and spot check source paths, compare the hashes for a sampling of + the source paths with hashes from corresponding paths in the given archive. Return a sequence of + the paths that fail that hash comparison. + ''' + # Based on the configured sample percentage, come up with a list of random sample files from the + # source directories. 
+ spot_check_config = next(check for check in config['checks'] if check['name'] == 'spot') + sample_count = max( + int(len(source_paths) * (spot_check_config['data_sample_percentage'] / 100)), 1 + ) + source_sample_paths = tuple(random.sample(source_paths, sample_count)) + existing_source_sample_paths = { + source_path for source_path in source_sample_paths if os.path.exists(source_path) + } + logger.debug( + f'{log_label}: Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check' + ) + + # Hash each file in the sample paths (if it exists). + hash_output = borgmatic.execute.execute_command_and_capture_output( + (spot_check_config.get('xxh64sum_command', 'xxh64sum'),) + + tuple(path for path in source_sample_paths if path in existing_source_sample_paths) + ) + + source_hashes = dict( + (reversed(line.split(' ', 1)) for line in hash_output.splitlines()), + **{path: '' for path in source_sample_paths if path not in existing_source_sample_paths}, + ) + + archive_hashes = dict( + reversed(line.split(' ', 1)) + for line in borgmatic.borg.list.capture_archive_listing( + repository['path'], + archive, + config, + local_borg_version, + global_arguments, + list_paths=source_sample_paths, + path_format='{xxh64} /{path}{NL}', # noqa: FS003 + local_path=local_path, + remote_path=remote_path, + ) + if line + ) + + # Compare the source hashes with the archive hashes to see how many match. + failing_paths = [] + + for path, source_hash in source_hashes.items(): + archive_hash = archive_hashes.get(path) + + if archive_hash is not None and archive_hash == source_hash: + continue + + failing_paths.append(path) + + return tuple(failing_paths) + + +def spot_check( + repository, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, +): + ''' + Given a repository dict, a loaded configuration dict, the local Borg version, global arguments + as an argparse.Namespace instance, the local Borg path, and the remote Borg path, perform a spot + check for the latest archive in the given repository. + + A spot check compares file counts and also the hashes for a random sampling of source files on + disk to those stored in the latest archive. If any differences are beyond configured tolerances, + then the check fails. 
+ ''' + log_label = f'{repository.get("label", repository["path"])}' + logger.debug(f'{log_label}: Running spot check') + spot_check_config = next(check for check in config['checks'] if check['name'] == 'spot') + + if spot_check_config['data_tolerance_percentage'] > spot_check_config['data_sample_percentage']: + raise ValueError( + 'The data_tolerance_percentage must be less than or equal to the data_sample_percentage' + ) + + source_paths = collect_spot_check_source_paths( + repository, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + ) + logger.debug(f'{log_label}: {len(source_paths)} total source paths for spot check') + + archive = borgmatic.borg.rlist.resolve_archive_name( + repository['path'], + 'latest', + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + ) + logger.debug(f'{log_label}: Using archive {archive} for spot check') + + archive_paths = collect_spot_check_archive_paths( + repository, + archive, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + ) + logger.debug(f'{log_label}: {len(archive_paths)} total archive paths for spot check') + + # Calculate the percentage delta between the source paths count and the archive paths count, and + # compare that delta to the configured count tolerance percentage. + count_delta_percentage = abs(len(source_paths) - len(archive_paths)) / len(source_paths) * 100 + + if count_delta_percentage > spot_check_config['count_tolerance_percentage']: + logger.debug( + f'{log_label}: Paths in source paths but not latest archive: {", ".join(set(source_paths) - set(archive_paths)) or "none"}' + ) + logger.debug( + f'{log_label}: Paths in latest archive but not source paths: {", ".join(set(archive_paths) - set(source_paths)) or "none"}' + ) + raise ValueError( + f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)' + ) + + failing_paths = compare_spot_check_hashes( + repository, + archive, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + log_label, + source_paths, + ) + + # Error if the percentage of failing hashes exceeds the configured tolerance percentage. + logger.debug(f'{log_label}: {len(failing_paths)} non-matching spot check hashes') + data_tolerance_percentage = spot_check_config['data_tolerance_percentage'] + failing_percentage = (len(failing_paths) / len(source_paths)) * 100 + + if failing_percentage > data_tolerance_percentage: + logger.debug( + f'{log_label}: Source paths with data not matching the latest archive: {", ".join(failing_paths)}' + ) + raise ValueError( + f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)' + ) + + logger.info( + f'{log_label}: Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta' + ) + + def run_check( config_filename, repository, @@ -369,14 +645,16 @@ def run_check( ) write_check_time(make_check_time_path(config, repository_id, 'extract')) - # if 'spot' in checks: - # TODO: - # count the number of files in source directories, but need to take patterns and stuff into account... 
- # in a loop until the sample percentage (of the total source files) is met: - # pick a random file from source directories and calculate its sha256 sum - # extract the file from the latest archive (to stdout) and calculate its sha256 sum - # if the two checksums are equal, increment the matching files count - # if the percentage of matching files (of the total source files) < tolerance percentage, error + if 'spot' in checks: + spot_check( + repository, + config, + local_borg_version, + global_arguments, + local_path, + remote_path, + ) + write_check_time(make_check_time_path(config, repository_id, 'spot')) borgmatic.hooks.command.execute_hook( config.get('after_check'), diff --git a/borgmatic/actions/json.py b/borgmatic/actions/json.py index 0a71166..0e2a8c1 100644 --- a/borgmatic/actions/json.py +++ b/borgmatic/actions/json.py @@ -1,6 +1,5 @@ -import logging import json - +import logging logger = logging.getLogger(__name__) diff --git a/borgmatic/borg/create.py b/borgmatic/borg/create.py index 59fa1a2..1cb9563 100644 --- a/borgmatic/borg/create.py +++ b/borgmatic/borg/create.py @@ -275,11 +275,11 @@ def collect_special_file_paths( create_command, config, local_path, working_directory, borg_environment, skip_directories ): ''' - Given a Borg create command as a tuple, a local Borg path, a working directory, a dict of - environment variables to pass to Borg, and a sequence of parent directories to skip, collect the - paths for any special files (character devices, block devices, and named pipes / FIFOs) that - Borg would encounter during a create. These are all paths that could cause Borg to hang if its - --read-special flag is used. + Given a Borg create command as a tuple, a configuration dict, a local Borg path, a working + directory, a dict of environment variables to pass to Borg, and a sequence of parent directories + to skip, collect the paths for any special files (character devices, block devices, and named + pipes / FIFOs) that Borg would encounter during a create. These are all paths that could cause + Borg to hang if its --read-special flag is used. ''' # Omit "--exclude-nodump" from the Borg dry run command, because that flag causes Borg to open # files including any named pipe we've created. @@ -402,11 +402,6 @@ def make_base_create_command( ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) - if stream_processes and config.get('read_special') is False: - logger.warning( - f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' - ) - create_flags = ( tuple(local_path.split(' ')) + ('create',) @@ -442,6 +437,41 @@ def make_base_create_command( repository_path, archive_name_format, local_borg_version ) + (sources if not pattern_file else ()) + # If database hooks are enabled (as indicated by streaming processes), exclude files that might + # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. + if stream_processes and not config.get('read_special'): + logger.warning( + f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' 
+ ) + try: + working_directory = os.path.expanduser(config.get('working_directory')) + except TypeError: + working_directory = None + + borg_environment = environment.make_environment(config) + + logger.debug(f'{repository_path}: Collecting special file paths') + special_file_paths = collect_special_file_paths( + create_flags + create_positional_arguments, + config, + local_path, + working_directory, + borg_environment, + skip_directories=borgmatic_source_directories, + ) + + if special_file_paths: + logger.warning( + f'{repository_path}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}' + ) + exclude_file = write_pattern_file( + expand_home_directories( + tuple(config.get('exclude_patterns') or ()) + special_file_paths + ), + pattern_file=exclude_file, + ) + create_flags += make_exclude_flags(config, exclude_file.name) + return (create_flags, create_positional_arguments, pattern_file, exclude_file) @@ -509,31 +539,6 @@ def create_archive( borg_environment = environment.make_environment(config) - # If database hooks are enabled (as indicated by streaming processes), exclude files that might - # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. - if stream_processes and not config.get('read_special'): - logger.debug(f'{repository_path}: Collecting special file paths') - special_file_paths = collect_special_file_paths( - create_flags + create_positional_arguments, - config, - local_path, - working_directory, - borg_environment, - skip_directories=borgmatic_source_directories, - ) - - if special_file_paths: - logger.warning( - f'{repository_path}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}' - ) - exclude_file = write_pattern_file( - expand_home_directories( - tuple(config.get('exclude_patterns') or ()) + special_file_paths - ), - pattern_file=exclude_file, - ) - create_flags += make_exclude_flags(config, exclude_file.name) - create_flags += ( (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) + (('--stats',) if stats and not json and not dry_run else ()) diff --git a/borgmatic/borg/list.py b/borgmatic/borg/list.py index a824c47..98096c6 100644 --- a/borgmatic/borg/list.py +++ b/borgmatic/borg/list.py @@ -95,14 +95,15 @@ def capture_archive_listing( local_borg_version, global_arguments, list_paths=None, + path_format=None, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an archive name, a configuration dict, the local Borg - version, global arguments as an argparse.Namespace, the archive paths in which to list files, and - local and remote Borg paths, capture the output of listing that archive and return it as a list - of file paths. + version, global arguments as an argparse.Namespace, the archive paths in which to list files, + the Borg path format to use for the output, and local and remote Borg paths, capture the output + of listing that archive and return it as a list of file paths. 
''' borg_environment = environment.make_environment(config) @@ -118,7 +119,7 @@ def capture_archive_listing( paths=[f'sh:{path}' for path in list_paths] if list_paths else None, find_paths=None, json=None, - format='{path}{NL}', # noqa: FS003 + format=path_format or '{path}{NL}', # noqa: FS003 ), global_arguments, local_path, diff --git a/borgmatic/commands/arguments.py b/borgmatic/commands/arguments.py index df0039b..c65ae71 100644 --- a/borgmatic/commands/arguments.py +++ b/borgmatic/commands/arguments.py @@ -614,10 +614,10 @@ def make_parsers(): check_group.add_argument( '--only', metavar='CHECK', - choices=('repository', 'archives', 'data', 'extract'), + choices=('repository', 'archives', 'data', 'extract', 'spot'), dest='only_checks', action='append', - help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks (subject to configured frequency, can specify flag multiple times)', + help='Run a particular consistency check (repository, archives, data, extract, or spot) instead of configured checks (subject to configured frequency, can specify flag multiple times)', ) check_group.add_argument( '--force', diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml index 9e4759b..da02358 100644 --- a/borgmatic/config/schema.yaml +++ b/borgmatic/config/schema.yaml @@ -543,8 +543,9 @@ properties: example: 2 weeks - required: - name - - sample_percentage - - tolerance_percentage + - data_sample_percentage + - data_tolerance_percentage + - count_tolerance_percentage additionalProperties: false properties: name: @@ -577,25 +578,45 @@ properties: example: 2 weeks - sample_percentage: + count_tolerance_percentage: + type: number + description: | + The percentage delta between the source + directories file count and the most recent backup + archive file count that is allowed before the + entire consistency check fails. This can catch + problems like incorrect excludes, inadvertent + deletes, etc. Only applies to the "spot" check. + example: 10 + data_sample_percentage: type: number description: | The percentage of total files in the source directories to randomly sample and compare to their corresponding files in the most recent - backup archive. Only applies to the "spot" - check. - example: 5 - tolerance_percentage: + backup archive. Only applies to the "spot" check. + example: 1 + data_tolerance_percentage: type: number description: | The percentage of total files in the source - directories that can fail a spot check - comparison without failing the entire - consistency check. Should be lower than or - equal to the "sample_percentage". Only applies - to the "spot" check. + directories that can fail a spot check comparison + without failing the entire consistency check. This + can catch problems like source files that have + been bulk-changed by malware, backups that have + been tampered with, etc. The value must be lower + than or equal to the "data_sample_percentage". + Only applies to the "spot" check. example: 0.5 + xxh64sum_command: + type: string + description: | + Command to use instead of "xxh64sum" to hash + source files, usually found in an OS package named + "xxhash". Do not substitute with a different hash + type (SHA, MD5, etc.) or the check will never + succeed. Only applies to the "spot" check.
+ example: /usr/local/bin/xxh64sum description: | List of one or more consistency checks to run on a periodic basis (if "frequency" is set) or every time borgmatic runs checks (if diff --git a/borgmatic/execute.py b/borgmatic/execute.py index df4b029..b0712cd 100644 --- a/borgmatic/execute.py +++ b/borgmatic/execute.py @@ -4,6 +4,7 @@ import logging import os import select import subprocess +import textwrap logger = logging.getLogger(__name__) @@ -219,13 +220,22 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path, b } +MAX_LOGGED_COMMAND_LENGTH = 1000 + + def log_command(full_command, input_file=None, output_file=None, environment=None): ''' Log the given command (a sequence of command/argument strings), along with its input/output file paths and extra environment variables (with omitted values in case they contain passwords). ''' logger.debug( - ' '.join(tuple(f'{key}=***' for key in (environment or {}).keys()) + tuple(full_command)) + textwrap.shorten( + ' '.join( + tuple(f'{key}=***' for key in (environment or {}).keys()) + tuple(full_command) + ), + width=MAX_LOGGED_COMMAND_LENGTH, + placeholder=' ...', + ) + (f" < {getattr(input_file, 'name', '')}" if input_file else '') + (f" > {getattr(output_file, 'name', '')}" if output_file else '') ) diff --git a/borgmatic/hooks/mariadb.py b/borgmatic/hooks/mariadb.py index 6740556..9aa619f 100644 --- a/borgmatic/hooks/mariadb.py +++ b/borgmatic/hooks/mariadb.py @@ -115,6 +115,14 @@ def execute_dump_command( ) +def use_streaming(databases, config, log_prefix): + ''' + Given a sequence of MariaDB database configuration dicts, a configuration dict (ignored), and a + log prefix (ignored), return whether streaming will be used during dumps. + ''' + return any(databases) + + def dump_data_sources(databases, config, log_prefix, dry_run): ''' Dump the given MariaDB databases to a named pipe. The databases are supplied as a sequence of diff --git a/borgmatic/hooks/mongodb.py b/borgmatic/hooks/mongodb.py index dbea768..efb602f 100644 --- a/borgmatic/hooks/mongodb.py +++ b/borgmatic/hooks/mongodb.py @@ -16,6 +16,14 @@ def make_dump_path(config): # pragma: no cover ) +def use_streaming(databases, config, log_prefix): + ''' + Given a sequence of MongoDB database configuration dicts, a configuration dict (ignored), and a + log prefix (ignored), return whether streaming will be used during dumps. + ''' + return any(database.get('format') != 'directory' for database in databases) + + def dump_data_sources(databases, config, log_prefix, dry_run): ''' Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of diff --git a/borgmatic/hooks/mysql.py b/borgmatic/hooks/mysql.py index 46f7657..8ffc778 100644 --- a/borgmatic/hooks/mysql.py +++ b/borgmatic/hooks/mysql.py @@ -114,6 +114,14 @@ def execute_dump_command( ) +def use_streaming(databases, config, log_prefix): + ''' + Given a sequence of MySQL database configuration dicts, a configuration dict (ignored), and a + log prefix (ignored), return whether streaming will be used during dumps. + ''' + return any(databases) + + def dump_data_sources(databases, config, log_prefix, dry_run): ''' Dump the given MySQL/MariaDB databases to a named pipe.
The databases are supplied as a sequence diff --git a/borgmatic/hooks/postgresql.py b/borgmatic/hooks/postgresql.py index fbbbe4f..74b9f37 100644 --- a/borgmatic/hooks/postgresql.py +++ b/borgmatic/hooks/postgresql.py @@ -96,6 +96,14 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run): ) +def use_streaming(databases, config, log_prefix): + ''' + Given a sequence of PostgreSQL database configuration dicts, a configuration dict (ignored), and + a log prefix (ignored), return whether streaming will be used during dumps. + ''' + return any(database.get('format') != 'directory' for database in databases) + + def dump_data_sources(databases, config, log_prefix, dry_run): ''' Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of diff --git a/borgmatic/hooks/sqlite.py b/borgmatic/hooks/sqlite.py index 5ac55fe..c7b494b 100644 --- a/borgmatic/hooks/sqlite.py +++ b/borgmatic/hooks/sqlite.py @@ -17,9 +17,17 @@ def make_dump_path(config): # pragma: no cover ) +def use_streaming(databases, config, log_prefix): + ''' + Given a sequence of SQLite database configuration dicts, a configuration dict (ignored), and a + log prefix (ignored), return whether streaming will be used during dumps. + ''' + return any(databases) + + def dump_data_sources(databases, config, log_prefix, dry_run): ''' - Dump the given SQLite3 databases to a named pipe. The databases are supplied as a sequence of + Dump the given SQLite databases to a named pipe. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. Use the given configuration dict to construct the destination path and the given log prefix in any log entries. @@ -71,7 +79,7 @@ def dump_data_sources(databases, config, log_prefix, dry_run): def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: no cover ''' - Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a + Remove the given SQLite database dumps from the filesystem. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. Use the given configuration dict to construct the destination path and the given log prefix in any log entries. If this is a dry run, then don't actually remove anything. @@ -81,8 +89,8 @@ def remove_data_source_dumps(databases, config, log_prefix, dry_run): # pragma: def make_data_source_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover ''' - Make a pattern that matches the given SQLite3 databases. The databases are supplied as a - sequence of configuration dicts, as per the configuration schema. + Make a pattern that matches the given SQLite databases. The databases are supplied as a sequence + of configuration dicts, as per the configuration schema.
''' return dump.make_data_source_dump_filename(make_dump_path(config), name) diff --git a/tests/unit/actions/test_check.py b/tests/unit/actions/test_check.py index dfca886..8246ec5 100644 --- a/tests/unit/actions/test_check.py +++ b/tests/unit/actions/test_check.py @@ -409,6 +409,444 @@ def test_upgrade_check_times_renames_stale_temporary_check_path(): module.upgrade_check_times(flexmock(), flexmock()) +def test_collect_spot_check_source_paths_parses_borg_output(): + flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return( + {'hook1': False, 'hook2': True} + ) + flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args( + dry_run=True, + repository_path='repo', + config=object, + config_paths=(), + local_borg_version=object, + global_arguments=object, + borgmatic_source_directories=(), + local_path=object, + remote_path=object, + list_files=True, + stream_processes=True, + ).and_return((('borg', 'create'), ('repo::archive',), flexmock(), flexmock())) + flexmock(module.borgmatic.borg.environment).should_receive('make_environment').and_return( + flexmock() + ) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).and_return( + 'warning: stuff\n- /etc/path\n+ /etc/other\n? /nope', + ) + flexmock(module.os.path).should_receive('isfile').and_return(True) + + assert module.collect_spot_check_source_paths( + repository={'path': 'repo'}, + config={'working_directory': '/'}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path', '/etc/other') + + +def test_collect_spot_check_source_paths_passes_through_stream_processes_false(): + flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return( + {'hook1': False, 'hook2': False} + ) + flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args( + dry_run=True, + repository_path='repo', + config=object, + config_paths=(), + local_borg_version=object, + global_arguments=object, + borgmatic_source_directories=(), + local_path=object, + remote_path=object, + list_files=True, + stream_processes=False, + ).and_return((('borg', 'create'), ('repo::archive',), flexmock(), flexmock())) + flexmock(module.borgmatic.borg.environment).should_receive('make_environment').and_return( + flexmock() + ) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).and_return( + 'warning: stuff\n- /etc/path\n+ /etc/other\n? 
/nope', + ) + flexmock(module.os.path).should_receive('isfile').and_return(True) + + assert module.collect_spot_check_source_paths( + repository={'path': 'repo'}, + config={'working_directory': '/'}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path', '/etc/other') + + +def test_collect_spot_check_source_paths_without_working_directory_parses_borg_output(): + flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return( + {'hook1': False, 'hook2': True} + ) + flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args( + dry_run=True, + repository_path='repo', + config=object, + config_paths=(), + local_borg_version=object, + global_arguments=object, + borgmatic_source_directories=(), + local_path=object, + remote_path=object, + list_files=True, + stream_processes=True, + ).and_return((('borg', 'create'), ('repo::archive',), flexmock(), flexmock())) + flexmock(module.borgmatic.borg.environment).should_receive('make_environment').and_return( + flexmock() + ) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).and_return( + 'warning: stuff\n- /etc/path\n+ /etc/other\n? /nope', + ) + flexmock(module.os.path).should_receive('isfile').and_return(True) + + assert module.collect_spot_check_source_paths( + repository={'path': 'repo'}, + config={}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path', '/etc/other') + + +def test_collect_spot_check_source_paths_includes_symlinks_but_skips_directories(): + flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return( + {'hook1': False, 'hook2': True} + ) + flexmock(module.borgmatic.borg.create).should_receive('make_base_create_command').with_args( + dry_run=True, + repository_path='repo', + config=object, + config_paths=(), + local_borg_version=object, + global_arguments=object, + borgmatic_source_directories=(), + local_path=object, + remote_path=object, + list_files=True, + stream_processes=True, + ).and_return((('borg', 'create'), ('repo::archive',), flexmock(), flexmock())) + flexmock(module.borgmatic.borg.environment).should_receive('make_environment').and_return( + flexmock() + ) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).and_return( + 'warning: stuff\n- /etc/path\n+ /etc/dir\n? 
/nope', + ) + flexmock(module.os.path).should_receive('isfile').with_args('/etc/path').and_return(False) + flexmock(module.os.path).should_receive('islink').with_args('/etc/path').and_return(True) + flexmock(module.os.path).should_receive('isfile').with_args('/etc/dir').and_return(False) + flexmock(module.os.path).should_receive('islink').with_args('/etc/dir').and_return(False) + + assert module.collect_spot_check_source_paths( + repository={'path': 'repo'}, + config={'working_directory': '/'}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path',) + + +def test_collect_spot_check_archive_paths_excludes_directories(): + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ( + 'f /etc/path', + 'f /etc/other', + 'd /etc/dir', + ) + ) + + assert module.collect_spot_check_archive_paths( + repository={'path': 'repo'}, + archive='archive', + config={}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path', '/etc/other') + + +def test_collect_spot_check_archive_paths_excludes_file_in_borgmatic_source_directory(): + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ( + 'f /etc/path', + 'f /root/.borgmatic/some/thing', + ) + ) + + assert module.collect_spot_check_archive_paths( + repository={'path': 'repo'}, + archive='archive', + config={'borgmatic_source_directory': '/root/.borgmatic'}, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) == ('/etc/path',) + + +def test_compare_spot_check_hashes_returns_paths_having_failing_hashes(): + flexmock(module.random).should_receive('sample').replace_with( + lambda population, count: population[:count] + ) + flexmock(module.os.path).should_receive('exists').and_return(True) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).with_args(('xxh64sum', '/foo', '/bar')).and_return('hash1 /foo\nhash2 /bar') + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ['hash1 /foo', 'nothash2 /bar'] + ) + + assert module.compare_spot_check_hashes( + repository={'path': 'repo'}, + archive='archive', + config={ + 'checks': [ + { + 'name': 'archives', + 'frequency': '2 weeks', + }, + { + 'name': 'spot', + 'data_sample_percentage': 50, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + log_label='repo', + source_paths=('/foo', '/bar', '/baz', '/quux'), + ) == ('/bar',) + + +def test_compare_spot_check_hashes_uses_xxh64sum_command_option(): + flexmock(module.random).should_receive('sample').replace_with( + lambda population, count: population[:count] + ) + flexmock(module.os.path).should_receive('exists').and_return(True) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).with_args(('/usr/local/bin/xxh64sum', '/foo', '/bar')).and_return('hash1 /foo\nhash2 /bar') + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ['hash1 /foo', 'nothash2 /bar'] + ) + + assert module.compare_spot_check_hashes( + repository={'path': 'repo'}, + archive='archive', + config={ + 'checks': [ + { + 'name': 'spot', + 'data_sample_percentage': 50, + 'xxh64sum_command': '/usr/local/bin/xxh64sum', + }, + ] + }, + local_borg_version=flexmock(), + 
global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + log_label='repo', + source_paths=('/foo', '/bar', '/baz', '/quux'), + ) == ('/bar',) + + +def test_compare_spot_check_hashes_consider_path_missing_from_archive_as_not_matching(): + flexmock(module.random).should_receive('sample').replace_with( + lambda population, count: population[:count] + ) + flexmock(module.os.path).should_receive('exists').and_return(True) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).with_args(('xxh64sum', '/foo', '/bar')).and_return('hash1 /foo\nhash2 /bar') + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ['hash1 /foo'] + ) + + assert module.compare_spot_check_hashes( + repository={'path': 'repo'}, + archive='archive', + config={ + 'checks': [ + { + 'name': 'spot', + 'data_sample_percentage': 50, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + log_label='repo', + source_paths=('/foo', '/bar', '/baz', '/quux'), + ) == ('/bar',) + + +def test_compare_spot_check_hashes_considers_non_existent_path_as_not_matching(): + flexmock(module.random).should_receive('sample').replace_with( + lambda population, count: population[:count] + ) + flexmock(module.os.path).should_receive('exists').with_args('/foo').and_return(True) + flexmock(module.os.path).should_receive('exists').with_args('/bar').and_return(False) + flexmock(module.borgmatic.execute).should_receive( + 'execute_command_and_capture_output' + ).with_args(('xxh64sum', '/foo')).and_return('hash1 /foo') + flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( + ['hash1 /foo', 'hash2 /bar'] + ) + + assert module.compare_spot_check_hashes( + repository={'path': 'repo'}, + archive='archive', + config={ + 'checks': [ + { + 'name': 'spot', + 'data_sample_percentage': 50, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + log_label='repo', + source_paths=('/foo', '/bar', '/baz', '/quux'), + ) == ('/bar',) + + +def test_spot_check_data_tolerance_percenatge_greater_than_data_sample_percentage_errors(): + with pytest.raises(ValueError): + module.spot_check( + repository={'path': 'repo'}, + config={ + 'checks': [ + { + 'name': 'spot', + 'data_tolerance_percentage': 7, + 'data_sample_percentage': 5, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) + + +def test_spot_check_with_count_delta_greater_than_count_tolerance_percentage_errors(): + flexmock(module).should_receive('collect_spot_check_source_paths').and_return( + ('/foo', '/bar', '/baz', '/quux') + ) + flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( + 'archive' + ) + flexmock(module).should_receive('collect_spot_check_archive_paths').and_return( + ('/foo', '/bar') + ).once() + + with pytest.raises(ValueError): + module.spot_check( + repository={'path': 'repo'}, + config={ + 'checks': [ + { + 'name': 'spot', + 'count_tolerance_percentage': 1, + 'data_tolerance_percentage': 4, + 'data_sample_percentage': 5, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) + + +def test_spot_check_with_failing_percentage_greater_than_data_tolerance_percentage_errors(): + 
flexmock(module).should_receive('collect_spot_check_source_paths').and_return( + ('/foo', '/bar', '/baz', '/quux') + ) + flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( + 'archive' + ) + flexmock(module).should_receive('collect_spot_check_archive_paths').and_return(('/foo', '/bar')) + flexmock(module).should_receive('compare_spot_check_hashes').and_return( + ('/bar', '/baz', '/quux') + ).once() + + with pytest.raises(ValueError): + module.spot_check( + repository={'path': 'repo'}, + config={ + 'checks': [ + { + 'name': 'spot', + 'count_tolerance_percentage': 55, + 'data_tolerance_percentage': 4, + 'data_sample_percentage': 5, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) + + +def test_spot_check_with_high_enough_tolerances_does_not_raise(): + flexmock(module).should_receive('collect_spot_check_source_paths').and_return( + ('/foo', '/bar', '/baz', '/quux') + ) + flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( + 'archive' + ) + flexmock(module).should_receive('collect_spot_check_archive_paths').and_return(('/foo', '/bar')) + flexmock(module).should_receive('compare_spot_check_hashes').and_return( + ('/bar', '/baz', '/quux') + ).once() + + module.spot_check( + repository={'path': 'repo'}, + config={ + 'checks': [ + { + 'name': 'spot', + 'count_tolerance_percentage': 55, + 'data_tolerance_percentage': 80, + 'data_sample_percentage': 80, + }, + ] + }, + local_borg_version=flexmock(), + global_arguments=flexmock(), + local_path=flexmock(), + remote_path=flexmock(), + ) + + def test_run_check_checks_archives_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() @@ -483,6 +921,42 @@ def test_run_check_runs_configured_extract_check(): ) +def test_run_check_runs_configured_spot_check(): + flexmock(module.logger).answer = lambda message: None + flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() + flexmock(module.borgmatic.borg.check).should_receive('get_repository_id').and_return(flexmock()) + flexmock(module).should_receive('upgrade_check_times') + flexmock(module).should_receive('parse_checks') + flexmock(module.borgmatic.borg.check).should_receive('make_archive_filter_flags').and_return(()) + flexmock(module).should_receive('make_archives_check_id').and_return(None) + flexmock(module).should_receive('filter_checks_on_frequency').and_return({'spot'}) + flexmock(module.borgmatic.borg.check).should_receive('check_archives').never() + flexmock(module.borgmatic.actions.check).should_receive('spot_check').once() + flexmock(module).should_receive('make_check_time_path') + flexmock(module).should_receive('write_check_time') + flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) + check_arguments = flexmock( + repository=None, + progress=flexmock(), + repair=flexmock(), + only_checks=flexmock(), + force=flexmock(), + ) + global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) + + module.run_check( + config_filename='test.yaml', + repository={'path': 'repo'}, + config={'repositories': ['repo']}, + hook_context={}, + local_borg_version=None, + check_arguments=check_arguments, + global_arguments=global_arguments, + local_path=None, + remote_path=None, + ) + + def test_run_check_without_checks_runs_nothing_except_hooks(): flexmock(module.logger).answer = 
        lambda message: None
    flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()

diff --git a/tests/unit/actions/test_json.py b/tests/unit/actions/test_json.py
index 3908286..c0b4dec 100644
--- a/tests/unit/actions/test_json.py
+++ b/tests/unit/actions/test_json.py
@@ -1,5 +1,4 @@
 import pytest
-from flexmock import flexmock
 
 from borgmatic.actions import json as module
 
diff --git a/tests/unit/borg/test_create.py b/tests/unit/borg/test_create.py
index dd27961..3e3dd13 100644
--- a/tests/unit/borg/test_create.py
+++ b/tests/unit/borg/test_create.py
@@ -948,7 +948,7 @@ def test_make_base_create_command_includes_list_flags_in_borg_command():
     assert not exclude_file
 
 
-def test_make_base_create_command_with_stream_processes_ignores_read_special_false_and_logs_warning():
+def test_make_base_create_command_with_stream_processes_ignores_read_special_false_and_excludes_special_files():
     flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
     flexmock(module).should_receive('map_directories_to_devices').and_return({})
     flexmock(module).should_receive('expand_directories').and_return(())
@@ -959,12 +959,17 @@ def test_make_base_create_command_with_stream_processes_ignores_read_special_fal
     flexmock(module).should_receive('make_list_filter_flags').and_return('FOO')
     flexmock(module.feature).should_receive('available').and_return(True)
     flexmock(module).should_receive('ensure_files_readable')
-    flexmock(module.logger).should_receive('warning').once()
     flexmock(module).should_receive('make_pattern_flags').and_return(())
     flexmock(module).should_receive('make_exclude_flags').and_return(())
     flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
         (f'repo::{DEFAULT_ARCHIVE_NAME}',)
     )
+    flexmock(module.logger).should_receive('warning').twice()
+    flexmock(module.environment).should_receive('make_environment')
+    flexmock(module).should_receive('collect_special_file_paths').and_return(('/dev/null',)).once()
+    flexmock(module).should_receive('expand_home_directories').and_return(())
+    flexmock(module).should_receive('write_pattern_file').and_return(flexmock(name='patterns'))
+    flexmock(module).should_receive('make_exclude_flags').and_return(())
 
     (create_flags, create_positional_arguments, pattern_file, exclude_file) = (
         module.make_base_create_command(
@@ -983,6 +988,48 @@ def test_make_base_create_command_with_stream_processes_ignores_read_special_fal
         )
     )
 
+    assert create_flags == ('borg', 'create', '--one-file-system', '--read-special')
+    assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS
+    assert not pattern_file
+    assert exclude_file
+
+
+def test_make_base_create_command_with_stream_processes_and_read_special_true_skip_special_files_excludes():
+    flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
+    flexmock(module).should_receive('map_directories_to_devices').and_return({})
+    flexmock(module).should_receive('expand_directories').and_return(())
+    flexmock(module).should_receive('pattern_root_directories').and_return([])
+    flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError)
+    flexmock(module).should_receive('expand_home_directories').and_return(())
+    flexmock(module).should_receive('write_pattern_file').and_return(None)
+    flexmock(module).should_receive('make_list_filter_flags').and_return('FOO')
+    flexmock(module.feature).should_receive('available').and_return(True)
+    flexmock(module).should_receive('ensure_files_readable')
+    flexmock(module).should_receive('make_pattern_flags').and_return(())
+    flexmock(module).should_receive('make_exclude_flags').and_return(())
+    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
+        (f'repo::{DEFAULT_ARCHIVE_NAME}',)
+    )
+    flexmock(module.logger).should_receive('warning').never()
+    flexmock(module).should_receive('collect_special_file_paths').never()
+
+    (create_flags, create_positional_arguments, pattern_file, exclude_file) = (
+        module.make_base_create_command(
+            dry_run=False,
+            repository_path='repo',
+            config={
+                'source_directories': ['foo', 'bar'],
+                'repositories': ['repo'],
+                'read_special': True,
+            },
+            config_paths=['/tmp/test.yaml'],
+            local_borg_version='1.2.3',
+            global_arguments=flexmock(log_json=False),
+            borgmatic_source_directories=(),
+            stream_processes=flexmock(),
+        )
+    )
+
     assert create_flags == ('borg', 'create', '--one-file-system', '--read-special')
     assert create_positional_arguments == REPO_ARCHIVE_WITH_PATHS
     assert not pattern_file
     assert exclude_file
@@ -1709,7 +1756,6 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr
         )
     )
     flexmock(module.environment).should_receive('make_environment')
-    flexmock(module).should_receive('collect_special_file_paths').and_return(())
     create_command = (
         'borg',
         'create',
@@ -1754,127 +1800,6 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr
     )
 
 
-def test_create_archive_with_stream_processes_ands_read_special_false_excludes_special_files():
-    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
-    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
-    processes = flexmock()
-    flexmock(module).should_receive('expand_directories').and_return(())
-    flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
-    flexmock(module).should_receive('make_base_create_command').and_return(
-        (
-            ('borg', 'create', '--one-file-system', '--read-special'),
-            REPO_ARCHIVE_WITH_PATHS,
-            flexmock(),
-            flexmock(),
-        )
-    )
-    flexmock(module.environment).should_receive('make_environment')
-    flexmock(module).should_receive('collect_special_file_paths').and_return(('/dev/null',))
-    flexmock(module).should_receive('expand_home_directories').and_return(())
-    flexmock(module).should_receive('write_pattern_file').and_return(flexmock(name='patterns'))
-    flexmock(module).should_receive('make_exclude_flags').and_return(())
-    create_command = (
-        'borg',
-        'create',
-        '--one-file-system',
-        '--read-special',
-    ) + REPO_ARCHIVE_WITH_PATHS
-    flexmock(module).should_receive('execute_command_with_processes').with_args(
-        create_command + ('--dry-run', '--list'),
-        processes=processes,
-        output_log_level=logging.INFO,
-        output_file=None,
-        borg_local_path='borg',
-        borg_exit_codes=None,
-        working_directory=None,
-        extra_environment=None,
-    )
-    flexmock(module).should_receive('execute_command_with_processes').with_args(
-        create_command,
-        processes=processes,
-        output_log_level=logging.INFO,
-        output_file=None,
-        borg_local_path='borg',
-        borg_exit_codes=None,
-        working_directory=None,
-        extra_environment=None,
-    )
-
-    module.create_archive(
-        dry_run=False,
-        repository_path='repo',
-        config={
-            'source_directories': ['foo', 'bar'],
-            'repositories': ['repo'],
-            'exclude_patterns': None,
-            'read_special': False,
-        },
-        config_paths=['/tmp/test.yaml'],
-        local_borg_version='1.2.3',
-        global_arguments=flexmock(log_json=False),
-        stream_processes=processes,
-    )
-
-
-def test_create_archive_with_stream_processes_and_read_special_true_skips_special_files_excludes():
-    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
-    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
-    processes = flexmock()
-    flexmock(module).should_receive('expand_directories').and_return(())
-    flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
-    flexmock(module).should_receive('make_base_create_command').and_return(
-        (
-            ('borg', 'create', '--one-file-system', '--read-special'),
-            REPO_ARCHIVE_WITH_PATHS,
-            flexmock(),
-            flexmock(),
-        )
-    )
-    flexmock(module.environment).should_receive('make_environment')
-    flexmock(module).should_receive('collect_special_file_paths').never()
-    create_command = (
-        'borg',
-        'create',
-        '--one-file-system',
-        '--read-special',
-    ) + REPO_ARCHIVE_WITH_PATHS
-    flexmock(module).should_receive('execute_command_with_processes').with_args(
-        create_command + ('--dry-run', '--list'),
-        processes=processes,
-        output_log_level=logging.INFO,
-        output_file=None,
-        borg_local_path='borg',
-        borg_exit_codes=None,
-        working_directory=None,
-        extra_environment=None,
-    )
-    flexmock(module).should_receive('execute_command_with_processes').with_args(
-        create_command,
-        processes=processes,
-        output_log_level=logging.INFO,
-        output_file=None,
-        borg_local_path='borg',
-        borg_exit_codes=None,
-        working_directory=None,
-        extra_environment=None,
-    )
-
-    module.create_archive(
-        dry_run=False,
-        repository_path='repo',
-        config={
-            'source_directories': ['foo', 'bar'],
-            'repositories': ['repo'],
-            'exclude_patterns': None,
-            'read_special': True,
-        },
-        config_paths=['/tmp/test.yaml'],
-        local_borg_version='1.2.3',
-        global_arguments=flexmock(log_json=False),
-        stream_processes=processes,
-    )
-
-
 def test_create_archive_with_json_calls_borg_with_json_flag():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
diff --git a/tests/unit/hooks/test_mariadb.py b/tests/unit/hooks/test_mariadb.py
index 15394a5..8034afe 100644
--- a/tests/unit/hooks/test_mariadb.py
+++ b/tests/unit/hooks/test_mariadb.py
@@ -44,6 +44,16 @@ def test_database_names_to_dump_queries_mariadb_for_database_names():
     assert names == ('foo', 'bar')
 
 
+def test_use_streaming_true_for_any_databases():
+    assert module.use_streaming(
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
+    )
+
+
+def test_use_streaming_false_for_no_databases():
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
+
+
 def test_dump_data_sources_dumps_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
diff --git a/tests/unit/hooks/test_mongodb.py b/tests/unit/hooks/test_mongodb.py
index 2fab040..86ac7b7 100644
--- a/tests/unit/hooks/test_mongodb.py
+++ b/tests/unit/hooks/test_mongodb.py
@@ -5,6 +5,26 @@ from flexmock import flexmock
 from borgmatic.hooks import mongodb as module
 
 
+def test_use_streaming_true_for_any_non_directory_format_databases():
+    assert module.use_streaming(
+        databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
+        config=flexmock(),
+        log_prefix=flexmock(),
+    )
+
+
+def test_use_streaming_false_for_all_directory_format_databases():
+    assert not module.use_streaming(
+        databases=[{'format': 'directory'}, {'format': 'directory'}],
+        config=flexmock(),
+        log_prefix=flexmock(),
+    )
+
+
+def test_use_streaming_false_for_no_databases():
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
+
+
 def test_dump_data_sources_runs_mongodump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
diff --git a/tests/unit/hooks/test_mysql.py b/tests/unit/hooks/test_mysql.py
index 3560a99..fa6145a 100644
--- a/tests/unit/hooks/test_mysql.py
+++ b/tests/unit/hooks/test_mysql.py
@@ -44,6 +44,16 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
     assert names == ('foo', 'bar')
 
 
+def test_use_streaming_true_for_any_databases():
+    assert module.use_streaming(
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
+    )
+
+
+def test_use_streaming_false_for_no_databases():
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
+
+
 def test_dump_data_sources_dumps_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
diff --git a/tests/unit/hooks/test_postgresql.py b/tests/unit/hooks/test_postgresql.py
index 4e8375b..87f5dc0 100644
--- a/tests/unit/hooks/test_postgresql.py
+++ b/tests/unit/hooks/test_postgresql.py
@@ -199,6 +199,26 @@ def test_database_names_to_dump_with_all_and_psql_command_uses_custom_command():
     )
 
 
+def test_use_streaming_true_for_any_non_directory_format_databases():
+    assert module.use_streaming(
+        databases=[{'format': 'stuff'}, {'format': 'directory'}, {}],
+        config=flexmock(),
+        log_prefix=flexmock(),
+    )
+
+
+def test_use_streaming_false_for_all_directory_format_databases():
+    assert not module.use_streaming(
+        databases=[{'format': 'directory'}, {'format': 'directory'}],
+        config=flexmock(),
+        log_prefix=flexmock(),
+    )
+
+
+def test_use_streaming_false_for_no_databases():
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
+
+
 def test_dump_data_sources_runs_pg_dump_for_each_database():
     databases = [{'name': 'foo'}, {'name': 'bar'}]
     processes = [flexmock(), flexmock()]
diff --git a/tests/unit/hooks/test_sqlite.py b/tests/unit/hooks/test_sqlite.py
index d74e56a..ea560e3 100644
--- a/tests/unit/hooks/test_sqlite.py
+++ b/tests/unit/hooks/test_sqlite.py
@@ -5,6 +5,16 @@ from flexmock import flexmock
 from borgmatic.hooks import sqlite as module
 
 
+def test_use_streaming_true_for_any_databases():
+    assert module.use_streaming(
+        databases=[flexmock(), flexmock()], config=flexmock(), log_prefix=flexmock()
+    )
+
+
+def test_use_streaming_false_for_no_databases():
+    assert not module.use_streaming(databases=[], config=flexmock(), log_prefix=flexmock())
+
+
 def test_dump_data_sources_logs_and_skips_if_dump_already_exists():
     databases = [{'path': '/path/to/database', 'name': 'database'}]
 
diff --git a/tests/unit/test_execute.py b/tests/unit/test_execute.py
index c1499b8..2d01c80 100644
--- a/tests/unit/test_execute.py
+++ b/tests/unit/test_execute.py
@@ -123,6 +123,13 @@ def test_append_last_lines_with_output_log_level_none_appends_captured_output():
         (('foo', 'bar'), None, None, None, 'foo bar'),
         (('foo', 'bar'), flexmock(name='input'), None, None, 'foo bar < input'),
         (('foo', 'bar'), None, flexmock(name='output'), None, 'foo bar > output'),
+        (
+            ('A',) * module.MAX_LOGGED_COMMAND_LENGTH,
+            None,
+            None,
+            None,
+            'A ' * (module.MAX_LOGGED_COMMAND_LENGTH // 2 - 2) + '...',
+        ),
         (
             ('foo', 'bar'),
             flexmock(name='input'),

From d243a8c836ef6401641ae5b0affb45dc780553c7 Mon Sep 17 00:00:00 2001
From: Dan Helfman
Date: Mon, 15 Apr 2024 12:51:07 -0700
Subject: [PATCH 4/5] Add spot check
 documentation (#656).
---
 borgmatic/config/schema.yaml                |  2 +-
 docs/how-to/deal-with-very-large-backups.md | 77 ++++++++++++++++++++-
 2 files changed, 77 insertions(+), 2 deletions(-)

diff --git a/borgmatic/config/schema.yaml b/borgmatic/config/schema.yaml
index da02358..dacc410 100644
--- a/borgmatic/config/schema.yaml
+++ b/borgmatic/config/schema.yaml
@@ -543,9 +543,9 @@ properties:
                     example: 2 weeks
             - required:
                 - name
+                - count_tolerance_percentage
                 - data_sample_percentage
                 - data_tolerance_percentage
-                - count_tolerance_percentage
               additionalProperties: false
               properties:
                   name:
diff --git a/docs/how-to/deal-with-very-large-backups.md b/docs/how-to/deal-with-very-large-backups.md
index 18a00e2..b4ba3fd 100644
--- a/docs/how-to/deal-with-very-large-backups.md
+++ b/docs/how-to/deal-with-very-large-backups.md
@@ -91,8 +91,9 @@ Here are the available checks from fastest to slowest:
 
 * `repository`: Checks the consistency of the repository itself.
 * `archives`: Checks all of the archives in the repository.
-* `extract`: Performs an extraction dry-run of the most recent archive.
+* `extract`: Performs an extraction dry-run of the latest archive.
 * `data`: Verifies the data integrity of all archives contents, decrypting and decompressing all data.
+* `spot`: Compares file counts and contents between your source files and the latest archive.
 
 Note that the `data` check is a more thorough version of the `archives` check,
 so enabling the `data` check implicitly enables the `archives` check as well.
@@ -102,6 +103,80 @@ documentation](https://borgbackup.readthedocs.io/en/stable/usage/check.html)
 for more information.
 
 
+### Spot check
+
+The various consistency checks all have trade-offs around speed and
+thoroughness, but most of them don't even look at your original source
+files—arguably one important way to ensure your backups contain the files
+you'll ultimately want to restore in the case of catastrophe (or just an
+accidentally deleted file). Because if something goes wrong with your source
+files, most consistency checks will still pass with flying colors and you
+won't discover there's a problem until you go to restore.
+
+New in version 1.8.10 Beta feature That's where the spot
+check comes in. This check actually compares your source files counts and data
+against those in the latest archive, potentially catching problems like
+incorrect excludes, inadvertent deletes, files changed by malware, etc.
+
+However, because an exhaustive comparison of all source files against the
+latest archive might be too slow, the spot check supports sampling a
+percentage of your source files for the comparison, ensuring it falls within
+configured tolerances.
+
+Here's how to use it. Start by installing the `xxhash` OS package if you don't
+already have it, so the spot check can run the `xxh64sum` command and
+efficiently hash files for comparison. Then add something like the following
+to your borgmatic configuration:
+
+```yaml
+checks:
+    - name: spot
+      count_tolerance_percentage: 10
+      data_sample_percentage: 1
+      data_tolerance_percentage: 0.5
+```
+
+The `count_tolerance_percentage` is the percentage delta between the source
+directories file count and the latest backup archive file count that is
+allowed before the entire consistency check fails. For instance, if the spot
+check runs and finds 100 source files and 105 files in the latest archive,
+that would be within a 10% count tolerance and the check would succeed. But if
+there were 100 source files and 200 archive files, the check would fail. (100
+source files and only 50 archive files would also fail.)
+
+The `data_sample_percentage` is the percentage of total files in the source
+directories to randomly sample and compare to their corresponding files in the
+latest backup archive. The comparison is performed by hashing the selected
+files in each of the source paths and the backup archive and counting hashes
+that don't match. For instance, if you have 1,000 source files and your sample
+percentage is 1%, then only 10 source files will be compared against the
+latest archive. These sampled files are selected randomly each time, so in
+effect the spot check is a probabilistic check.
+
+The `data_tolerance_percentage` is the percentage of total files in the source
+directories that can fail a spot check data comparison without failing the
+entire consistency check. The value must be lower than or equal to the
+`data_sample_percentage`.
+
+All three options are required when using the spot check. And because the spot
+check relies on these configured tolerances, it may not be a
+set-it-and-forget-it type of consistency check, at least until you get the
+tolerances dialed in so there are minimal false positives or negatives. For
+certain workloads where your source files experience wild swings of changed
+data or file counts, the spot check may not be suitable at all.
+
+What if you change or add or delete a bunch of your source files and you don't
+want the spot check to fail the next time it's run? Run `borgmatic create` to
+create a new backup, thereby allowing the spot check to run against an archive
+that contains your source file changes.
+
+While the spot check feature is currently in beta, it may be subject to
+breaking changes. But feel free to use it in production if you're okay with
+that caveat, and please [provide any
+feedback](https://torsion.org/borgmatic/#issues) you have on this feature.
+
+
 ### Check frequency
 
 New in version 1.6.2 You can

From 75bdbe60873d7b021be8f74a2e06054e977355d4 Mon Sep 17 00:00:00 2001
From: Dan Helfman
Date: Mon, 15 Apr 2024 14:18:42 -0700
Subject: [PATCH 5/5] Spot check documentation and edge case tweaks (#656).
---
 NEWS                                        |  8 +--
 borgmatic/actions/check.py                  |  2 +-
 docs/how-to/deal-with-very-large-backups.md | 58 +++++++++++----------
 test_requirements.txt                       |  2 +-
 tests/unit/actions/test_check.py            | 36 +++++++++++++
 5 files changed, 73 insertions(+), 33 deletions(-)

diff --git a/NEWS b/NEWS
index 22d0952..7e50965 100644
--- a/NEWS
+++ b/NEWS
@@ -1,8 +1,8 @@
 1.8.10.dev0
- * #656: Add a "spot" consistency check that compares file counts and contents between your source
-   files and the latest archive, ensuring they fall within configured tolerances. This can catch
-   problems like incorrect excludes, inadvertent deletes, files changed by malware, etc. See the
-   documentation for more information:
+ * #656 (beta): Add a "spot" consistency check that compares file counts and contents between your
+   source files and the latest archive, ensuring they fall within configured tolerances. This can
+   catch problems like incorrect excludes, inadvertent deletes, files changed by malware, etc. See
+   the documentation for more information:
    https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#spot-check
  * #842: When a command hook exits with a soft failure, ping the log and finish states for any
    configured monitoring hooks.
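To make the tolerance arithmetic described in the spot check documentation above concrete, here is a minimal Python sketch. It is illustrative only and not borgmatic's actual implementation; the function names and the choice to measure the delta relative to the source file count are assumptions.

```python
# Illustrative sketch of the spot check tolerance math described above; the
# function names and the exact formula (delta measured against the source
# file count) are assumptions, not borgmatic's actual implementation.


def count_check_passes(source_count, archive_count, count_tolerance_percentage):
    '''Return whether the source vs. archive file count delta is within tolerance.'''
    delta_percentage = abs(source_count - archive_count) / source_count * 100

    return delta_percentage <= count_tolerance_percentage


def data_check_passes(mismatched_count, source_count, data_tolerance_percentage):
    '''Return whether the number of failing hash comparisons is within tolerance.'''
    return mismatched_count / source_count * 100 <= data_tolerance_percentage


# 100 source files vs. 105 archive files is within a 10% count tolerance ...
assert count_check_passes(100, 105, count_tolerance_percentage=10)

# ... but 100 vs. 200 (or 100 vs. 50) is not.
assert not count_check_passes(100, 200, count_tolerance_percentage=10)
assert not count_check_passes(100, 50, count_tolerance_percentage=10)
```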
diff --git a/borgmatic/actions/check.py b/borgmatic/actions/check.py
index 2c21570..60d8f05 100644
--- a/borgmatic/actions/check.py
+++ b/borgmatic/actions/check.py
@@ -410,7 +410,7 @@ def compare_spot_check_hashes(
     # source directories.
     spot_check_config = next(check for check in config['checks'] if check['name'] == 'spot')
     sample_count = max(
-        int(len(source_paths) * (spot_check_config['data_sample_percentage'] / 100)), 1
+        int(len(source_paths) * (min(spot_check_config['data_sample_percentage'], 100) / 100)), 1
     )
     source_sample_paths = tuple(random.sample(source_paths, sample_count))
     existing_source_sample_paths = {
diff --git a/docs/how-to/deal-with-very-large-backups.md b/docs/how-to/deal-with-very-large-backups.md
index b4ba3fd..5331398 100644
--- a/docs/how-to/deal-with-very-large-backups.md
+++ b/docs/how-to/deal-with-very-large-backups.md
@@ -108,23 +108,23 @@ for more information.
 The various consistency checks all have trade-offs around speed and
 thoroughness, but most of them don't even look at your original source
 files—arguably one important way to ensure your backups contain the files
-you'll ultimately want to restore in the case of catastrophe (or just an
-accidentally deleted file). Because if something goes wrong with your source
-files, most consistency checks will still pass with flying colors and you
-won't discover there's a problem until you go to restore.
+you'll want to restore in the case of catastrophe (or just an accidentally
+deleted file). Because if something goes wrong with your source files, most
+consistency checks will still pass with flying colors and you won't discover
+there's a problem until you go to restore.
 
 New in version 1.8.10 Beta feature That's where the spot
-check comes in. This check actually compares your source files counts and data
+check comes in. This check actually compares your source file counts and data
 against those in the latest archive, potentially catching problems like
 incorrect excludes, inadvertent deletes, files changed by malware, etc.
 
 However, because an exhaustive comparison of all source files against the
-latest archive might be too slow, the spot check supports sampling a
+latest archive might be too slow, the spot check supports *sampling* a
 percentage of your source files for the comparison, ensuring it falls within
 configured tolerances.
 
-Here's how to use it. Start by installing the `xxhash` OS package if you don't
+Here's how it works. Start by installing the `xxhash` OS package if you don't
 already have it, so the spot check can run the `xxh64sum` command and
 efficiently hash files for comparison. Then add something like the following
 to your borgmatic configuration:
@@ -140,40 +140,44 @@ checks:
 The `count_tolerance_percentage` is the percentage delta between the source
 directories file count and the latest backup archive file count that is
 allowed before the entire consistency check fails. For instance, if the spot
-check runs and finds 100 source files and 105 files in the latest archive,
-that would be within a 10% count tolerance and the check would succeed. But if
-there were 100 source files and 200 archive files, the check would fail. (100
-source files and only 50 archive files would also fail.)
+check runs and finds 100 source files on disk and 105 files in the latest
+archive, that would be within the configured 10% count tolerance and the check
+would succeed. But if there were 100 source files and 200 archive files, the
+check would fail. (100 source files and only 50 archive files would also
+fail.)
 
 The `data_sample_percentage` is the percentage of total files in the source
 directories to randomly sample and compare to their corresponding files in the
-latest backup archive. The comparison is performed by hashing the selected
-files in each of the source paths and the backup archive and counting hashes
-that don't match. For instance, if you have 1,000 source files and your sample
-percentage is 1%, then only 10 source files will be compared against the
-latest archive. These sampled files are selected randomly each time, so in
-effect the spot check is a probabilistic check.
+latest backup archive. A higher value allows a more accurate check—and a
+slower one. The comparison is performed by hashing the selected files in each
+of the source paths and counting hashes that don't match the latest archive.
+For instance, if you have 1,000 source files and your sample percentage is 1%,
+then only 10 source files will be compared against the latest archive. These
+sampled files are selected randomly each time, so in effect the spot check is
+probabilistic.
 
 The `data_tolerance_percentage` is the percentage of total files in the source
 directories that can fail a spot check data comparison without failing the
 entire consistency check. The value must be lower than or equal to the
 `data_sample_percentage`.
 
-All three options are required when using the spot check. And because the spot
+All three options are required when using the spot check. And because the
 check relies on these configured tolerances, it may not be a
 set-it-and-forget-it type of consistency check, at least until you get the
-tolerances dialed in so there are minimal false positives or negatives. For
-certain workloads where your source files experience wild swings of changed
-data or file counts, the spot check may not be suitable at all.
+tolerances dialed in so there are minimal false positives or negatives. It is
+recommended you run `borgmatic check` several times after configuring the spot
+check, tweaking your tolerances as needed. For certain workloads where your
+source files experience wild swings of file contents or counts, the spot check
+may not be suitable at all.
 
-What if you change or add or delete a bunch of your source files and you don't
+What if you add, delete, or change a bunch of your source files and you don't
 want the spot check to fail the next time it's run? Run `borgmatic create` to
-create a new backup, thereby allowing the spot check to run against an archive
-that contains your source file changes.
+create a new backup, thereby allowing the next spot check to run against an
+archive that contains your recent changes.
 
-While the spot check feature is currently in beta, it may be subject to
-breaking changes. But feel free to use it in production if you're okay with
-that caveat, and please [provide any
+As long as the spot check feature is in beta, it may be subject to breaking
+changes. But feel free to use it in production if you're okay with that
+caveat, and please [provide any
 feedback](https://torsion.org/borgmatic/#issues) you have on this feature.
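The sampling-and-hashing flow that the revised documentation (and the `compare_spot_check_hashes` tweak above) describes can be sketched roughly as follows. This is a simplified, hypothetical outline rather than the code in `borgmatic/actions/check.py`; in particular, the `sample_and_compare` helper and the `archive_hashes` dict of hashes from the latest archive are stand-ins for illustration.

```python
import random
import subprocess


def sample_and_compare(source_paths, archive_hashes, data_sample_percentage):
    '''
    Hypothetical sketch: randomly sample the configured percentage of source
    paths (capped at 100%), hash the sampled files with xxh64sum, and return
    the sampled paths whose hashes don't match archive_hashes, a {path: hash}
    dict taken from the latest archive.
    '''
    sample_count = max(int(len(source_paths) * (min(data_sample_percentage, 100) / 100)), 1)
    sampled_paths = random.sample(source_paths, sample_count)

    # xxh64sum prints one "<hash>  <path>" line per file given on its command line.
    output = subprocess.check_output(('xxh64sum',) + tuple(sampled_paths)).decode()

    source_hashes = {}
    for line in output.splitlines():
        digest, path = line.split(maxsplit=1)
        source_hashes[path] = digest

    return tuple(
        path
        for path in sampled_paths
        if source_hashes.get(path) != archive_hashes.get(path)
    )
```

A higher `data_sample_percentage` simply increases `sample_count`, which is why the documentation frames it as a trade-off between accuracy and speed; capping the percentage at 100 mirrors the edge case handled in the patch above.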
diff --git a/test_requirements.txt b/test_requirements.txt
index 0b63bb8..24adc2b 100644
--- a/test_requirements.txt
+++ b/test_requirements.txt
@@ -13,7 +13,7 @@ flake8-quotes==3.3.2
 flake8-use-fstring==1.4
 flake8-variables-names==0.0.5
 flexmock==0.11.3
-idna==3.4
+idna==3.7
 isort==5.12.0
 jsonschema==4.17.3
 Markdown==3.4.1
diff --git a/tests/unit/actions/test_check.py b/tests/unit/actions/test_check.py
index 8246ec5..4862720 100644
--- a/tests/unit/actions/test_check.py
+++ b/tests/unit/actions/test_check.py
@@ -635,6 +635,42 @@ def test_compare_spot_check_hashes_returns_paths_having_failing_hashes():
     ) == ('/bar',)
 
 
+def test_compare_spot_check_hashes_handles_data_sample_percentage_above_100():
+    flexmock(module.random).should_receive('sample').replace_with(
+        lambda population, count: population[:count]
+    )
+    flexmock(module.os.path).should_receive('exists').and_return(True)
+    flexmock(module.borgmatic.execute).should_receive(
+        'execute_command_and_capture_output'
+    ).with_args(('xxh64sum', '/foo', '/bar')).and_return('hash1 /foo\nhash2 /bar')
+    flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
+        ['nothash1 /foo', 'nothash2 /bar']
+    )
+
+    assert module.compare_spot_check_hashes(
+        repository={'path': 'repo'},
+        archive='archive',
+        config={
+            'checks': [
+                {
+                    'name': 'archives',
+                    'frequency': '2 weeks',
+                },
+                {
+                    'name': 'spot',
+                    'data_sample_percentage': 1000,
+                },
+            ]
+        },
+        local_borg_version=flexmock(),
+        global_arguments=flexmock(),
+        local_path=flexmock(),
+        remote_path=flexmock(),
+        log_label='repo',
+        source_paths=('/foo', '/bar'),
+    ) == ('/foo', '/bar')
+
+
 def test_compare_spot_check_hashes_uses_xxh64sum_command_option():
    flexmock(module.random).should_receive('sample').replace_with(
        lambda population, count: population[:count]