Remove sections (#721).

Dan Helfman 2023-07-08 23:14:30 -07:00
parent 9cafc16052
commit 48b6a1679a
103 changed files with 2817 additions and 3050 deletions

NEWS

@@ -8,6 +8,13 @@
* #720: Fix an error when dumping a MySQL database and the "exclude_nodump" option is set.
* When merging two configuration files, error gracefully if the two files do not adhere to the same
format.
+ * #721: Remove configuration sections ("location:", "storage:", "hooks:" etc.), while still keeping
+ deprecated support for them. Now, all options are at the same level, and you don't need to worry
+ about commenting/uncommenting section headers when you change an option.
+ * #721: BREAKING: The retention prefix and the consistency prefix can no longer have different
+ values (unless one is not set).
+ * #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless
+ one is not set).
* BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action,
as newer versions of Borg list successful (non-checkpoint) archives by default.
* All deprecated configuration option values now generate warning logs.
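
The first #721 entry above describes flattening the configuration file: options that previously lived under section headers now sit at the top level. A minimal before-and-after sketch of a borgmatic YAML config (the specific option values are illustrative, not taken from this commit):

    # Before: options nested under sections (still accepted, but deprecated).
    location:
        source_directories:
            - /home
        repositories:
            - path: ssh://user@backupserver/./backups.borg
    storage:
        compression: lz4
    retention:
        keep_daily: 7

    # After: the same options at the top level, no section headers.
    source_directories:
        - /home
    repositories:
        - path: ssh://user@backupserver/./backups.borg
    compression: lz4
    keep_daily: 7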

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_borg(
repository,
- storage,
+ config,
local_borg_version,
borg_arguments,
global_arguments,
@@ -28,7 +28,7 @@ def run_borg(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
borg_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -36,7 +36,7 @@ def run_borg(
)
borgmatic.borg.borg.run_arbitrary_borg(
repository['path'],
- storage,
+ config,
local_borg_version,
options=borg_arguments.options,
archive=archive_name,

@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
def run_break_lock(
repository,
- storage,
+ config,
local_borg_version,
break_lock_arguments,
global_arguments,
@@ -26,7 +26,7 @@ def run_break_lock(
)
borgmatic.borg.break_lock.break_lock(
repository['path'],
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,

@@ -10,10 +10,7 @@ logger = logging.getLogger(__name__)
def run_check(
config_filename,
repository,
- location,
- storage,
- consistency,
- hooks,
+ config,
hook_context,
local_borg_version,
check_arguments,
@@ -30,8 +27,8 @@ def run_check(
return
borgmatic.hooks.command.execute_hook(
- hooks.get('before_check'),
- hooks.get('umask'),
+ config.get('before_check'),
+ config.get('umask'),
config_filename,
'pre-check',
global_arguments.dry_run,
@@ -40,9 +37,7 @@ def run_check(
logger.info(f'{repository.get("label", repository["path"])}: Running consistency checks')
borgmatic.borg.check.check_archives(
repository['path'],
- location,
- storage,
- consistency,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,
@@ -53,8 +48,8 @@ def run_check(
force=check_arguments.force,
)
borgmatic.hooks.command.execute_hook(
- hooks.get('after_check'),
- hooks.get('umask'),
+ config.get('after_check'),
+ config.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,

@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
def run_compact(
config_filename,
repository,
- storage,
- retention,
- hooks,
+ config,
hook_context,
local_borg_version,
compact_arguments,
@@ -31,8 +29,8 @@ def run_compact(
return
borgmatic.hooks.command.execute_hook(
- hooks.get('before_compact'),
- hooks.get('umask'),
+ config.get('before_compact'),
+ config.get('umask'),
config_filename,
'pre-compact',
global_arguments.dry_run,
@@ -45,7 +43,7 @@ def run_compact(
borgmatic.borg.compact.compact_segments(
global_arguments.dry_run,
repository['path'],
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,
@@ -59,8 +57,8 @@ def run_compact(
f'{repository.get("label", repository["path"])}: Skipping compact (only available/needed in Borg 1.2+)'
)
borgmatic.hooks.command.execute_hook(
- hooks.get('after_compact'),
- hooks.get('umask'),
+ config.get('after_compact'),
+ config.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,

@@ -2,6 +2,7 @@ import logging
import borgmatic.config.generate
import borgmatic.config.validate
+ import borgmatic.logger
logger = logging.getLogger(__name__)
@@ -14,6 +15,7 @@ def run_generate(generate_arguments, global_arguments):
Raise FileExistsError if a file already exists at the destination path and the generate
arguments do not have overwrite set.
'''
+ borgmatic.logger.add_custom_log_levels()
dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else ''
logger.answer(

@@ -1,6 +1,7 @@
import logging
import borgmatic.config.generate
+ import borgmatic.logger
logger = logging.getLogger(__name__)
@@ -14,6 +15,8 @@ def run_validate(validate_arguments, configs):
loading machinery prior to here, so this function mainly exists to support additional validate
flags like "--show".
'''
+ borgmatic.logger.add_custom_log_levels()
if validate_arguments.show:
for config_path, config in configs.items():
if len(configs) > 1:

@@ -17,7 +17,7 @@ import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
- def create_borgmatic_manifest(location, config_paths, dry_run):
+ def create_borgmatic_manifest(config, config_paths, dry_run):
'''
Create a borgmatic manifest file to store the paths to the configuration files used to create
the archive.
@@ -25,7 +25,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run):
if dry_run:
return
- borgmatic_source_directory = location.get(
+ borgmatic_source_directory = config.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
@@ -49,9 +49,7 @@ def create_borgmatic_manifest(location, config_paths, dry_run):
def run_create(
config_filename,
repository,
- location,
- storage,
- hooks,
+ config,
hook_context,
local_borg_version,
create_arguments,
@@ -71,8 +69,8 @@ def run_create(
return
borgmatic.hooks.command.execute_hook(
- hooks.get('before_backup'),
- hooks.get('umask'),
+ config.get('before_backup'),
+ config.get('umask'),
config_filename,
'pre-backup',
global_arguments.dry_run,
@@ -81,30 +79,25 @@ def run_create(
logger.info(f'{repository.get("label", repository["path"])}: Creating archive{dry_run_label}')
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
- hooks,
+ config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
global_arguments.dry_run,
)
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_databases',
- hooks,
+ config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
global_arguments.dry_run,
)
- create_borgmatic_manifest(
- location, global_arguments.used_config_paths, global_arguments.dry_run
- )
+ create_borgmatic_manifest(config, global_arguments.used_config_paths, global_arguments.dry_run)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository['path'],
- location,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,
@@ -120,15 +113,14 @@ def run_create(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
- hooks,
+ config,
config_filename,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(
- hooks.get('after_backup'),
- hooks.get('umask'),
+ config.get('after_backup'),
+ config.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_export_tar(
repository,
- storage,
+ config,
local_borg_version,
export_tar_arguments,
global_arguments,
@@ -31,7 +31,7 @@ def run_export_tar(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
export_tar_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -39,7 +39,7 @@ def run_export_tar(
),
export_tar_arguments.paths,
export_tar_arguments.destination,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,

@@ -11,9 +11,7 @@ logger = logging.getLogger(__name__)
def run_extract(
config_filename,
repository,
- location,
- storage,
- hooks,
+ config,
hook_context,
local_borg_version,
extract_arguments,
@@ -25,8 +23,8 @@ def run_extract(
Run the "extract" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
- hooks.get('before_extract'),
- hooks.get('umask'),
+ config.get('before_extract'),
+ config.get('umask'),
config_filename,
'pre-extract',
global_arguments.dry_run,
@@ -44,15 +42,14 @@ def run_extract(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
extract_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
remote_path,
),
extract_arguments.paths,
- location,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,
@@ -62,8 +59,8 @@ def run_extract(
progress=extract_arguments.progress,
)
borgmatic.hooks.command.execute_hook(
- hooks.get('after_extract'),
- hooks.get('umask'),
+ config.get('after_extract'),
+ config.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,

@@ -11,7 +11,7 @@ logger = logging.getLogger(__name__)
def run_info(
repository,
- storage,
+ config,
local_borg_version,
info_arguments,
global_arguments,
@@ -33,7 +33,7 @@ def run_info(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
info_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -41,7 +41,7 @@ def run_info(
)
json_output = borgmatic.borg.info.display_archives_info(
repository['path'],
- storage,
+ config,
local_borg_version,
borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name),
global_arguments,

@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def run_list(
repository,
- storage,
+ config,
local_borg_version,
list_arguments,
global_arguments,
@@ -34,7 +34,7 @@ def run_list(
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
list_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -42,7 +42,7 @@ def run_list(
)
json_output = borgmatic.borg.list.list_archive(
repository['path'],
- storage,
+ config,
local_borg_version,
borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name),
global_arguments,

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_mount(
repository,
- storage,
+ config,
local_borg_version,
mount_arguments,
global_arguments,
@@ -34,14 +34,14 @@ def run_mount(
borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
mount_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
remote_path,
),
mount_arguments,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path=local_path,

@@ -10,9 +10,7 @@ logger = logging.getLogger(__name__)
def run_prune(
config_filename,
repository,
- storage,
- retention,
- hooks,
+ config,
hook_context,
local_borg_version,
prune_arguments,
@@ -30,8 +28,8 @@ def run_prune(
return
borgmatic.hooks.command.execute_hook(
- hooks.get('before_prune'),
- hooks.get('umask'),
+ config.get('before_prune'),
+ config.get('umask'),
config_filename,
'pre-prune',
global_arguments.dry_run,
@@ -41,8 +39,7 @@ def run_prune(
borgmatic.borg.prune.prune_archives(
global_arguments.dry_run,
repository['path'],
- storage,
- retention,
+ config,
local_borg_version,
prune_arguments,
global_arguments,
@@ -50,8 +47,8 @@ def run_prune(
remote_path=remote_path,
)
borgmatic.hooks.command.execute_hook(
- hooks.get('after_prune'),
- hooks.get('umask'),
+ config.get('after_prune'),
+ config.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,

@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
def run_rcreate(
repository,
- storage,
+ config,
local_borg_version,
rcreate_arguments,
global_arguments,
@@ -27,7 +27,7 @@ def run_rcreate(
borgmatic.borg.rcreate.create_repository(
global_arguments.dry_run,
repository['path'],
- storage,
+ config,
local_borg_version,
global_arguments,
rcreate_arguments.encryption_mode,

@@ -18,12 +18,12 @@ UNSPECIFIED_HOOK = object()
def get_configured_database(
- hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
+ config, archive_database_names, hook_name, database_name, configuration_database_name=None
):
'''
- Find the first database with the given hook name and database name in the configured hooks
- dict and the given archive database names dict (from hook name to database names contained in
- a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
+ Find the first database with the given hook name and database name in the configuration dict and
+ the given archive database names dict (from hook name to database names contained in a
+ particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
hooks for the named database. If a configuration database name is given, use that instead of the
database name to lookup the database in the given hooks configuration.
@@ -33,9 +33,13 @@ def get_configured_database(
configuration_database_name = database_name
if hook_name == UNSPECIFIED_HOOK:
- hooks_to_search = hooks
+ hooks_to_search = {
+ hook_name: value
+ for (hook_name, value) in config.items()
+ if hook_name in borgmatic.hooks.dump.DATABASE_HOOK_NAMES
+ }
else:
- hooks_to_search = {hook_name: hooks[hook_name]}
+ hooks_to_search = {hook_name: config[hook_name]}
return next(
(
@@ -58,9 +62,7 @@ def get_configured_hook_name_and_database(hooks, database_name):
def restore_single_database(
repository,
- location,
- storage,
- hooks,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -81,10 +83,9 @@ def restore_single_database(
dump_pattern = borgmatic.hooks.dispatch.call_hooks(
'make_database_dump_pattern',
- hooks,
+ config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
database['name'],
)[hook_name]
@@ -94,8 +95,7 @@ def restore_single_database(
repository=repository['path'],
archive=archive_name,
paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
- location_config=location,
- storage_config=storage,
+ config=config,
local_borg_version=local_borg_version,
global_arguments=global_arguments,
local_path=local_path,
@@ -112,7 +112,7 @@ def restore_single_database(
{hook_name: [database]},
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
+ config,
global_arguments.dry_run,
extract_process,
connection_params,
@@ -122,21 +122,20 @@ def restore_single_database(
def collect_archive_database_names(
repository,
archive,
- location,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
remote_path,
):
'''
- Given a local or remote repository path, a resolved archive name, a location configuration dict,
- a storage configuration dict, the local Borg version, global_arguments an argparse.Namespace,
- and local and remote Borg paths, query the archive for the names of databases it contains and
- return them as a dict from hook name to a sequence of database names.
+ Given a local or remote repository path, a resolved archive name, a configuration dict, the
+ local Borg version, global_arguments an argparse.Namespace, and local and remote Borg paths,
+ query the archive for the names of databases it contains and return them as a dict from hook
+ name to a sequence of database names.
'''
borgmatic_source_directory = os.path.expanduser(
- location.get(
+ config.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
).lstrip('/')
@@ -146,7 +145,7 @@ def collect_archive_database_names(
dump_paths = borgmatic.borg.list.capture_archive_listing(
repository,
archive,
- storage,
+ config,
local_borg_version,
global_arguments,
list_path=parent_dump_path,
@@ -249,9 +248,7 @@ def ensure_databases_found(restore_names, remaining_restore_names, found_names):
def run_restore(
repository,
- location,
- storage,
- hooks,
+ config,
local_borg_version,
restore_arguments,
global_arguments,
@@ -275,17 +272,16 @@ def run_restore(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
- hooks,
+ config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
global_arguments.dry_run,
)
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository['path'],
restore_arguments.archive,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -294,8 +290,7 @@ def run_restore(
archive_database_names = collect_archive_database_names(
repository['path'],
archive_name,
- location,
- storage,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -315,7 +310,7 @@ def run_restore(
for hook_name, database_names in restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
- hooks, archive_database_names, hook_name, database_name
+ config, archive_database_names, hook_name, database_name
)
if not found_database:
@@ -327,9 +322,7 @@ def run_restore(
found_names.add(database_name)
restore_single_database(
repository,
- location,
- storage,
- hooks,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -340,12 +333,12 @@ def run_restore(
connection_params,
)
- # For any database that weren't found via exact matches in the hooks configuration, try to
- # fallback to "all" entries.
+ # For any database that weren't found via exact matches in the configuration, try to fallback
+ # to "all" entries.
for hook_name, database_names in remaining_restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
- hooks, archive_database_names, hook_name, database_name, 'all'
+ config, archive_database_names, hook_name, database_name, 'all'
)
if not found_database:
@@ -357,9 +350,7 @@ def run_restore(
restore_single_database(
repository,
- location,
- storage,
- hooks,
+ config,
local_borg_version,
global_arguments,
local_path,
@@ -372,10 +363,9 @@ def run_restore(
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
- hooks,
+ config,
repository['path'],
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
- location,
global_arguments.dry_run,
)

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_rinfo(
repository,
- storage,
+ config,
local_borg_version,
rinfo_arguments,
global_arguments,
@@ -31,7 +31,7 @@ def run_rinfo(
json_output = borgmatic.borg.rinfo.display_repository_info(
repository['path'],
- storage,
+ config,
local_borg_version,
rinfo_arguments=rinfo_arguments,
global_arguments=global_arguments,

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def run_rlist(
repository,
- storage,
+ config,
local_borg_version,
rlist_arguments,
global_arguments,
@@ -29,7 +29,7 @@ def run_rlist(
json_output = borgmatic.borg.rlist.list_repository(
repository['path'],
- storage,
+ config,
local_borg_version,
rlist_arguments=rlist_arguments,
global_arguments=global_arguments,

@@ -7,7 +7,7 @@ logger = logging.getLogger(__name__)
def run_transfer(
repository,
- storage,
+ config,
local_borg_version,
transfer_arguments,
global_arguments,
@@ -23,7 +23,7 @@ def run_transfer(
borgmatic.borg.transfer.transfer_archives(
global_arguments.dry_run,
repository['path'],
- storage,
+ config,
local_borg_version,
transfer_arguments,
global_arguments,

@@ -13,7 +13,7 @@ BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'}
def run_arbitrary_borg(
repository_path,
- storage_config,
+ config,
local_borg_version,
options,
archive=None,
@@ -21,13 +21,13 @@ def run_arbitrary_borg(
remote_path=None,
):
'''
- Given a local or remote repository path, a storage config dict, the local Borg version, a
+ Given a local or remote repository path, a configuration dict, the local Borg version, a
sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary
Borg command, passing in REPOSITORY and ARCHIVE environment variables for optional use in the
command.
'''
borgmatic.logger.add_custom_log_levels()
- lock_wait = storage_config.get('lock_wait', None)
+ lock_wait = config.get('lock_wait', None)
try:
options = options[1:] if options[0] == '--' else options
@@ -61,7 +61,7 @@ def run_arbitrary_borg(
borg_local_path=local_path,
shell=True,
extra_environment=dict(
- (environment.make_environment(storage_config) or {}),
+ (environment.make_environment(config) or {}),
**{
'BORG_REPO': repository_path,
'ARCHIVE': archive if archive else '',

@@ -8,19 +8,19 @@ logger = logging.getLogger(__name__)
def break_lock(
repository_path,
- storage_config,
+ config,
local_borg_version,
global_arguments,
local_path='borg',
remote_path=None,
):
'''
- Given a local or remote repository path, a storage configuration dict, the local Borg version,
- an argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
+ Given a local or remote repository path, a configuration dict, the local Borg version, an
+ argparse.Namespace of global arguments, and optional local and remote Borg paths, break any
repository and cache locks leftover from Borg aborting.
'''
- umask = storage_config.get('umask', None)
- lock_wait = storage_config.get('lock_wait', None)
+ umask = config.get('umask', None)
+ lock_wait = config.get('lock_wait', None)
full_command = (
(local_path, 'break-lock')
@@ -33,5 +33,5 @@ def break_lock(
+ flags.make_repository_flags(repository_path, local_borg_version)
)
- borg_environment = environment.make_environment(storage_config)
+ borg_environment = environment.make_environment(config)
execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment)

@@ -19,12 +19,12 @@ DEFAULT_CHECKS = (
logger = logging.getLogger(__name__)
- def parse_checks(consistency_config, only_checks=None):
+ def parse_checks(config, only_checks=None):
'''
- Given a consistency config with a "checks" sequence of dicts and an optional list of override
+ Given a configuration dict with a "checks" sequence of dicts and an optional list of override
checks, return a tuple of named checks to run.
- For example, given a retention config of:
+ For example, given a config of:
{'checks': ({'name': 'repository'}, {'name': 'archives'})}
@@ -36,8 +36,7 @@ def parse_checks(consistency_config, only_checks=None):
has a name of "disabled", return an empty tuple, meaning that no checks should be run.
'''
checks = only_checks or tuple(
- check_config['name']
- for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
+ check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS)
)
checks = tuple(check.lower() for check in checks)
if 'disabled' in checks:
@@ -90,23 +89,22 @@ def parse_frequency(frequency):
def filter_checks_on_frequency(
- location_config,
- consistency_config,
+ config,
borg_repository_id,
checks,
force,
archives_check_id=None,
):
'''
- Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
- repository ID, a sequence of checks, whether to force checks to run, and an ID for the archives
- check potentially being run (if any), filter down those checks based on the configured
- "frequency" for each check as compared to its check time file.
+ Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence
+ of checks, whether to force checks to run, and an ID for the archives check potentially being
+ run (if any), filter down those checks based on the configured "frequency" for each check as
+ compared to its check time file.
In other words, a check whose check time file's timestamp is too new (based on the configured
frequency) will get cut from the returned sequence of checks. Example:
- consistency_config = {
+ config = {
'checks': [
{
'name': 'archives',
@@ -115,9 +113,9 @@ def filter_checks_on_frequency(
]
}
- When this function is called with that consistency_config and "archives" in checks, "archives"
- will get filtered out of the returned result if its check time file is newer than 2 weeks old,
- indicating that it's not yet time to run that check again.
+ When this function is called with that config and "archives" in checks, "archives" will get
+ filtered out of the returned result if its check time file is newer than 2 weeks old, indicating
+ that it's not yet time to run that check again.
Raise ValueError if a frequency cannot be parsed.
'''
@@ -126,7 +124,7 @@ def filter_checks_on_frequency(
if force:
return tuple(filtered_checks)
- for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
+ for check_config in config.get('checks', DEFAULT_CHECKS):
check = check_config['name']
if checks and check not in checks:
continue
@@ -135,9 +133,7 @@ def filter_checks_on_frequency(
if not frequency_delta:
continue
- check_time = probe_for_check_time(
- location_config, borg_repository_id, check, archives_check_id
- )
+ check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id)
if not check_time:
continue
@@ -153,13 +149,11 @@ def filter_checks_on_frequency(
return tuple(filtered_checks)
- def make_archive_filter_flags(
- local_borg_version, storage_config, checks, check_last=None, prefix=None
- ):
+ def make_archive_filter_flags(local_borg_version, config, checks, check_last=None, prefix=None):
'''
- Given the local Borg version, a storage configuration dict, a parsed sequence of checks, the
- check last value, and a consistency check prefix, transform the checks into tuple of
- command-line flags for filtering archives in a check command.
+ Given the local Borg version, a configuration dict, a parsed sequence of checks, the check last
+ value, and a consistency check prefix, transform the checks into tuple of command-line flags for
+ filtering archives in a check command.
If a check_last value is given and "archives" is in checks, then include a "--last" flag. And if
a prefix value is given and "archives" is in checks, then include a "--match-archives" flag.
@@ -174,8 +168,8 @@ def make_archive_filter_flags(
if prefix
else (
flags.make_match_archives_flags(
- storage_config.get('match_archives'),
- storage_config.get('archive_name_format'),
+ config.get('match_archives'),
+ config.get('archive_name_format'),
local_borg_version,
)
)
@@ -237,14 +231,14 @@ def make_check_flags(checks, archive_filter_flags):
)
- def make_check_time_path(location_config, borg_repository_id, check_type, archives_check_id=None):
+ def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None):
'''
- Given a location configuration dict, a Borg repository ID, the name of a check type
- ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
- path for recording that check's time (the time of that check last occurring).
+ Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
+ "archives", etc.), and a unique hash of the archives filter flags, return a path for recording
+ that check's time (the time of that check last occurring).
'''
borgmatic_source_directory = os.path.expanduser(
- location_config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
+ config.get('borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY)
)
if check_type in ('archives', 'data'):
@@ -287,11 +281,11 @@ def read_check_time(path):
return None
- def probe_for_check_time(location_config, borg_repository_id, check, archives_check_id):
+ def probe_for_check_time(config, borg_repository_id, check, archives_check_id):
'''
- Given a location configuration dict, a Borg repository ID, the name of a check type
- ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a
- the corresponding check time or None if such a check time does not exist.
+ Given a configuration dict, a Borg repository ID, the name of a check type ("repository",
+ "archives", etc.), and a unique hash of the archives filter flags, return a the corresponding
+ check time or None if such a check time does not exist.
When the check type is "archives" or "data", this function probes two different paths to find
the check time, e.g.:
@@ -311,8 +305,8 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch
read_check_time(group[0])
for group in itertools.groupby(
(
- make_check_time_path(location_config, borg_repository_id, check, archives_check_id),
- make_check_time_path(location_config, borg_repository_id, check),
+ make_check_time_path(config, borg_repository_id, check, archives_check_id),
+ make_check_time_path(config, borg_repository_id, check),
)
)
)
@@ -323,10 +317,10 @@ def probe_for_check_time(location_config, borg_repository_id, check, archives_ch
return None
- def upgrade_check_times(location_config, borg_repository_id):
+ def upgrade_check_times(config, borg_repository_id):
'''
- Given a location configuration dict and a Borg repository ID, upgrade any corresponding check
- times on disk from old-style paths to new-style paths.
+ Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on
+ disk from old-style paths to new-style paths.
Currently, the only upgrade performed is renaming an archive or data check path that looks like:
@@ -337,7 +331,7 @@ def upgrade_check_times(location_config, borg_repository_id):
~/.borgmatic/checks/1234567890/archives/all
'''
for check_type in ('archives', 'data'):
- new_path = make_check_time_path(location_config, borg_repository_id, check_type, 'all')
+ new_path = make_check_time_path(config, borg_repository_id, check_type, 'all')
old_path = os.path.dirname(new_path)
temporary_path = f'{old_path}.temp'
@@ -357,9 +351,7 @@ def upgrade_check_times(location_config, borg_repository_id):
def check_archives(
repository_path,
- location_config,
- storage_config,
- consistency_config,
+ config,
local_borg_version,
global_arguments,
local_path='borg',
@@ -370,10 +362,9 @@ def check_archives(
force=None,
):
'''
- Given a local or remote repository path, a storage config dict, a consistency config dict,
- local/remote commands to run, whether to include progress information, whether to attempt a
- repair, and an optional list of checks to use instead of configured checks, check the contained
- Borg archives for consistency.
+ Given a local or remote repository path, a configuration dict, local/remote commands to run,
+ whether to include progress information, whether to attempt a repair, and an optional list of
+ checks to use instead of configured checks, check the contained Borg archives for consistency.
If there are no consistency checks to run, skip running them.
@@ -383,7 +374,7 @@ def check_archives(
borg_repository_id = json.loads(
rinfo.display_repository_info(
repository_path,
- storage_config,
+ config,
local_borg_version,
argparse.Namespace(json=True),
global_arguments,
@@ -394,21 +385,20 @@ def check_archives(
except (json.JSONDecodeError, KeyError):
raise ValueError(f'Cannot determine Borg repository ID for {repository_path}')
- upgrade_check_times(location_config, borg_repository_id)
+ upgrade_check_times(config, borg_repository_id)
- check_last = consistency_config.get('check_last', None)
- prefix = consistency_config.get('prefix')
- configured_checks = parse_checks(consistency_config, only_checks)
+ check_last = config.get('check_last', None)
+ prefix = config.get('prefix')
+ configured_checks = parse_checks(config, only_checks)
lock_wait = None
- extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
+ extra_borg_options = config.get('extra_borg_options', {}).get('check', '')
archive_filter_flags = make_archive_filter_flags(
- local_borg_version, storage_config, configured_checks, check_last, prefix
+ local_borg_version, config, configured_checks, check_last, prefix
)
archives_check_id = make_archives_check_id(archive_filter_flags)
checks = filter_checks_on_frequency(
- location_config,
- consistency_config,
+ config,
borg_repository_id,
configured_checks,
force,
@@ -416,7 +406,7 @@ def check_archives(
)
if set(checks).intersection({'repository', 'archives', 'data'}):
- lock_wait = storage_config.get('lock_wait')
+ lock_wait = config.get('lock_wait')
verbosity_flags = ()
if logger.isEnabledFor(logging.INFO):
@@ -437,7 +427,7 @@ def check_archives(
+ flags.make_repository_flags(repository_path, local_borg_version)
)
- borg_environment = environment.make_environment(storage_config)
+ borg_environment = environment.make_environment(config)
# The Borg repair option triggers an interactive prompt, which won't work when output is
# captured. And progress messes with the terminal directly.
@@ -450,12 +440,12 @@ def check_archives(
for check in checks:
write_check_time(
- make_check_time_path(location_config, borg_repository_id, check, archives_check_id)
+ make_check_time_path(config, borg_repository_id, check, archives_check_id)
)
if 'extract' in checks:
extract.extract_last_archive_dry_run(
- storage_config,
+ config,
local_borg_version,
global_arguments,
repository_path,
@@ -463,4 +453,4 @@ def check_archives(
local_path,
remote_path,
)
- write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
+ write_check_time(make_check_time_path(config, borg_repository_id, 'extract'))

@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def compact_segments(
dry_run,
repository_path,
- storage_config,
+ config,
local_borg_version,
global_arguments,
local_path='borg',
@@ -19,12 +19,12 @@ def compact_segments(
threshold=None,
):
'''
- Given dry-run flag, a local or remote repository path, a storage config dict, and the local
- Borg version, compact the segments in a repository.
+ Given dry-run flag, a local or remote repository path, a configuration dict, and the local Borg
+ version, compact the segments in a repository.
'''
- umask = storage_config.get('umask', None)
- lock_wait = storage_config.get('lock_wait', None)
- extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '')
+ umask = config.get('umask', None)
+ lock_wait = config.get('lock_wait', None)
+ extra_borg_options = config.get('extra_borg_options', {}).get('compact', '')
full_command = (
(local_path, 'compact')
@@ -49,5 +49,5 @@ def compact_segments(
full_command,
output_log_level=logging.INFO,
borg_local_path=local_path,
- extra_environment=environment.make_environment(storage_config),
+ extra_environment=environment.make_environment(config),
)

@@ -146,12 +146,12 @@ def ensure_files_readable(*filename_lists):
open(file_object).close()
- def make_pattern_flags(location_config, pattern_filename=None):
+ def make_pattern_flags(config, pattern_filename=None):
'''
- Given a location config dict with a potential patterns_from option, and a filename containing
- any additional patterns, return the corresponding Borg flags for those files as a tuple.
+ Given a configuration dict with a potential patterns_from option, and a filename containing any
+ additional patterns, return the corresponding Borg flags for those files as a tuple.
'''
- pattern_filenames = tuple(location_config.get('patterns_from') or ()) + (
+ pattern_filenames = tuple(config.get('patterns_from') or ()) + (
(pattern_filename,) if pattern_filename else ()
)
@@ -162,12 +162,12 @@ def make_pattern_flags(location_config, pattern_filename=None):
)
- def make_exclude_flags(location_config, exclude_filename=None):
+ def make_exclude_flags(config, exclude_filename=None):
'''
- Given a location config dict with various exclude options, and a filename containing any exclude
+ Given a configuration dict with various exclude options, and a filename containing any exclude
patterns, return the corresponding Borg flags as a tuple.
'''
- exclude_filenames = tuple(location_config.get('exclude_from') or ()) + (
+ exclude_filenames = tuple(config.get('exclude_from') or ()) + (
(exclude_filename,) if exclude_filename else ()
)
exclude_from_flags = tuple(
@@ -175,17 +175,15 @@ def make_exclude_flags(location_config, exclude_filename=None):
('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
)
)
- caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else ()
+ caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else ()
if_present_flags = tuple(
itertools.chain.from_iterable(
('--exclude-if-present', if_present)
- for if_present in location_config.get('exclude_if_present', ())
+ for if_present in config.get('exclude_if_present', ())
)
)
- keep_exclude_tags_flags = (
- ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else ()
- )
- exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else ()
+ keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else ()
+ exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else ()
return (
exclude_from_flags
@@ -326,8 +324,7 @@ def check_all_source_directories_exist(source_directories):
def create_archive(
dry_run,
repository_path,
- location_config,
- storage_config,
+ config,
local_borg_version,
global_arguments,
local_path='borg',
@@ -339,72 +336,70 @@ def create_archive(
stream_processes=None,
):
'''
- Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a
- storage config dict, create a Borg archive and return Borg's JSON output (if any).
+ Given vebosity/dry-run flags, a local or remote repository path, and a configuration dict,
+ create a Borg archive and return Borg's JSON output (if any).
If a sequence of stream processes is given (instances of subprocess.Popen), then execute the
create command while also triggering the given processes to produce output.
'''
borgmatic.logger.add_custom_log_levels()
borgmatic_source_directories = expand_directories(
- collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory'))
+ collect_borgmatic_source_directories(config.get('borgmatic_source_directory'))
)
- if location_config.get('source_directories_must_exist', False):
- check_all_source_directories_exist(location_config.get('source_directories'))
+ if config.get('source_directories_must_exist', False):
+ check_all_source_directories_exist(config.get('source_directories'))
sources = deduplicate_directories(
map_directories_to_devices(
expand_directories(
- tuple(location_config.get('source_directories', ()))
+ tuple(config.get('source_directories', ()))
+ borgmatic_source_directories
+ tuple(global_arguments.used_config_paths)
)
),
additional_directory_devices=map_directories_to_devices(
- expand_directories(pattern_root_directories(location_config.get('patterns')))
+ expand_directories(pattern_root_directories(config.get('patterns')))
),
)
- ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from'))
+ ensure_files_readable(config.get('patterns_from'), config.get('exclude_from'))
try:
- working_directory = os.path.expanduser(location_config.get('working_directory'))
+ working_directory = os.path.expanduser(config.get('working_directory'))
except TypeError:
working_directory = None
pattern_file = (
- write_pattern_file(location_config.get('patterns'), sources)
- if location_config.get('patterns') or location_config.get('patterns_from')
+ write_pattern_file(config.get('patterns'), sources)
+ if config.get('patterns') or config.get('patterns_from')
else None
)
- exclude_file = write_pattern_file(
- expand_home_directories(location_config.get('exclude_patterns'))
- )
- checkpoint_interval = storage_config.get('checkpoint_interval', None)
- checkpoint_volume = storage_config.get('checkpoint_volume', None)
- chunker_params = storage_config.get('chunker_params', None)
- compression = storage_config.get('compression', None)
- upload_rate_limit = storage_config.get('upload_rate_limit', None)
- umask = storage_config.get('umask', None)
- lock_wait = storage_config.get('lock_wait', None)
+ exclude_file = write_pattern_file(expand_home_directories(config.get('exclude_patterns')))
+ checkpoint_interval = config.get('checkpoint_interval', None)
+ checkpoint_volume = config.get('checkpoint_volume', None)
+ chunker_params = config.get('chunker_params', None)
+ compression = config.get('compression', None)
+ upload_rate_limit = config.get('upload_rate_limit', None)
+ umask = config.get('umask', None)
+ lock_wait = config.get('lock_wait', None)
list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
- files_cache = location_config.get('files_cache')
- archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
- extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '')
+ files_cache = config.get('files_cache')
+ archive_name_format = config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT)
+ extra_borg_options = config.get('extra_borg_options', {}).get('create', '')
if feature.available(feature.Feature.ATIME, local_borg_version):
- atime_flags = ('--atime',) if location_config.get('atime') is True else ()
+ atime_flags = ('--atime',) if config.get('atime') is True else ()
else:
- atime_flags = ('--noatime',) if location_config.get('atime') is False else ()
+ atime_flags = ('--noatime',) if config.get('atime') is False else ()
if feature.available(feature.Feature.NOFLAGS, local_borg_version):
- noflags_flags = ('--noflags',) if location_config.get('flags') is False else ()
+ noflags_flags = ('--noflags',) if config.get('flags') is False else ()
else:
- noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else ()
+ noflags_flags = ('--nobsdflags',) if config.get('flags') is False else ()
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
- numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else ()
+ numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
else:
- numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else ()
+ numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version):
upload_ratelimit_flags = (
@@ -415,7 +410,7 @@ def create_archive(
('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else ()
)
- if stream_processes and location_config.get('read_special') is False:
+ if stream_processes and config.get('read_special') is False:
logger.warning(
f'{repository_path}: Ignoring configured "read_special" value of false, as true is needed for database hooks.'
)
@@ -423,23 +418,19 @@ def create_archive(
create_command = (
tuple(local_path.split(' '))
+ ('create',)
- + make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
- + make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
+ + make_pattern_flags(config, pattern_file.name if pattern_file else None)
+ + make_exclude_flags(config, exclude_file.name if exclude_file else None)
+ (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
+ (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ())
+ (('--chunker-params', chunker_params) if chunker_params else ())
+ (('--compression', compression) if compression else ())
+ upload_ratelimit_flags
- + (
- ('--one-file-system',)
- if location_config.get('one_file_system') or stream_processes
- else ()
- )
+ + (('--one-file-system',) if config.get('one_file_system') or stream_processes else ())
+ numeric_ids_flags
+ atime_flags
- + (('--noctime',) if location_config.get('ctime') is False else ())
- + (('--nobirthtime',) if location_config.get('birthtime') is False else ())
- + (('--read-special',) if location_config.get('read_special') or stream_processes else ())
+ + (('--noctime',) if config.get('ctime') is False else ())
+ + (('--nobirthtime',) if config.get('birthtime') is False else ())
+ + (('--read-special',) if config.get('read_special') or stream_processes else ())
+ noflags_flags
+ (('--files-cache', files_cache) if files_cache else ())
+ (('--remote-path', remote_path) if remote_path else ())
@@ -470,11 +461,11 @@ def create_archive(
# the terminal directly.
output_file = DO_NOT_CAPTURE if progress else None
- borg_environment = environment.make_environment(storage_config)
+ borg_environment = environment.make_environment(config)
# If database hooks are enabled (as indicated by streaming processes), exclude files that might # If database hooks are enabled (as indicated by streaming processes), exclude files that might
# cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True.
if stream_processes and not location_config.get('read_special'): if stream_processes and not config.get('read_special'):
logger.debug(f'{repository_path}: Collecting special file paths') logger.debug(f'{repository_path}: Collecting special file paths')
special_file_paths = collect_special_file_paths( special_file_paths = collect_special_file_paths(
create_command, create_command,
@ -490,11 +481,11 @@ def create_archive(
) )
exclude_file = write_pattern_file( exclude_file = write_pattern_file(
expand_home_directories( expand_home_directories(
tuple(location_config.get('exclude_patterns') or ()) + special_file_paths tuple(config.get('exclude_patterns') or ()) + special_file_paths
), ),
pattern_file=exclude_file, pattern_file=exclude_file,
) )
create_command += make_exclude_flags(location_config, exclude_file.name) create_command += make_exclude_flags(config, exclude_file.name)
create_command += ( create_command += (
(('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ())
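
The comment above explains why, when database hooks stream data through named pipes, borgmatic excludes special files unless read_special is explicitly enabled: Borg would otherwise open them and hang. A minimal sketch of the kind of filesystem check that identifies such files (helper names here are illustrative, not borgmatic's exact implementation):

    import os
    import stat

    def is_special_file(path):
        # Character/block devices and named pipes (FIFOs) are the file types that
        # can make a "borg create --read-special" run block indefinitely.
        mode = os.stat(path).st_mode
        return stat.S_ISCHR(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode)

    def filter_special_paths(candidate_paths):
        return tuple(path for path in candidate_paths if is_special_file(path))

    print(is_special_file(os.devnull))  # Typically True: /dev/null is a character device.
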

View file

@ -17,15 +17,15 @@ DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = {
} }
def make_environment(storage_config): def make_environment(config):
''' '''
Given a borgmatic storage configuration dict, return its options converted to a Borg environment Given a borgmatic configuration dict, return its options converted to a Borg environment
variable dict. variable dict.
''' '''
environment = {} environment = {}
for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items(): for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items():
value = storage_config.get(option_name) value = config.get(option_name)
if value: if value:
environment[environment_variable_name] = str(value) environment[environment_variable_name] = str(value)
@ -34,7 +34,7 @@ def make_environment(storage_config):
option_name, option_name,
environment_variable_name, environment_variable_name,
) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items(): ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items():
value = storage_config.get(option_name, False) value = config.get(option_name, False)
environment[environment_variable_name] = 'yes' if value else 'no' environment[environment_variable_name] = 'yes' if value else 'no'
return environment return environment
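
With sections gone, callers hand the whole configuration dict to make_environment() rather than just its former storage piece. A brief usage sketch; the option values and the exact resulting variables are illustrative, since the real mapping tables live in this module:

    from borgmatic.borg.environment import make_environment

    # Hypothetical flat configuration; no more "storage:" wrapper around these options.
    config = {
        'encryption_passphrase': 'hunter2',
        'ssh_command': 'ssh -i /etc/borgmatic/id_ed25519',
    }

    env = make_environment(config)
    # Expect something like {'BORG_PASSPHRASE': 'hunter2', 'BORG_RSH': 'ssh -i ...'},
    # plus 'yes'/'no' defaults for the boolean options in the second table.
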

View file

@ -13,7 +13,7 @@ def export_tar_archive(
archive, archive,
paths, paths,
destination_path, destination_path,
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path='borg', local_path='borg',
@ -24,16 +24,16 @@ def export_tar_archive(
): ):
''' '''
Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
export from the archive, a destination path to export to, a storage configuration dict, the export from the archive, a destination path to export to, a configuration dict, the local Borg
local Borg version, optional local and remote Borg paths, an optional filter program, whether to version, optional local and remote Borg paths, an optional filter program, whether to include
include per-file details, and an optional number of path components to strip, export the archive per-file details, and an optional number of path components to strip, export the archive into
into the given destination path as a tar-formatted file. the given destination path as a tar-formatted file.
If the destination path is "-", then stream the output to stdout instead of to a file. If the destination path is "-", then stream the output to stdout instead of to a file.
''' '''
borgmatic.logger.add_custom_log_levels() borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None) umask = config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
full_command = ( full_command = (
(local_path, 'export-tar') (local_path, 'export-tar')
@ -70,5 +70,5 @@ def export_tar_archive(
output_file=DO_NOT_CAPTURE if destination_path == '-' else None, output_file=DO_NOT_CAPTURE if destination_path == '-' else None,
output_log_level=output_log_level, output_log_level=output_log_level,
borg_local_path=local_path, borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )
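
umask and lock_wait now come straight from the flat config dict and, as elsewhere in these modules, turn into optional command-line flags by concatenating possibly-empty tuples. A tiny self-contained sketch of that idiom with made-up values:

    config = {'umask': '0077', 'lock_wait': 5}  # Illustrative flat configuration.

    umask = config.get('umask', None)
    lock_wait = config.get('lock_wait', None)

    full_command = (
        ('borg', 'export-tar')
        + (('--umask', str(umask)) if umask else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
    )
    print(full_command)
    # ('borg', 'export-tar', '--umask', '0077', '--lock-wait', '5')
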

View file

@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def extract_last_archive_dry_run( def extract_last_archive_dry_run(
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
repository_path, repository_path,
@ -32,7 +32,7 @@ def extract_last_archive_dry_run(
last_archive_name = rlist.resolve_archive_name( last_archive_name = rlist.resolve_archive_name(
repository_path, repository_path,
'latest', 'latest',
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path, local_path,
@ -43,7 +43,7 @@ def extract_last_archive_dry_run(
return return
list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else () list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
full_extract_command = ( full_extract_command = (
(local_path, 'extract', '--dry-run') (local_path, 'extract', '--dry-run')
+ (('--remote-path', remote_path) if remote_path else ()) + (('--remote-path', remote_path) if remote_path else ())
@ -66,8 +66,7 @@ def extract_archive(
repository, repository,
archive, archive,
paths, paths,
location_config, config,
storage_config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path='borg', local_path='borg',
@ -80,22 +79,22 @@ def extract_archive(
''' '''
Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to
restore from the archive, the local Borg version string, an argparse.Namespace of global restore from the archive, the local Borg version string, an argparse.Namespace of global
arguments, location/storage configuration dicts, optional local and remote Borg paths, and an arguments, a configuration dict, optional local and remote Borg paths, and an optional
optional destination path to extract to, extract the archive into the current directory. destination path to extract to, extract the archive into the current directory.
If extract to stdout is True, then start the extraction streaming to stdout, and return that If extract to stdout is True, then start the extraction streaming to stdout, and return that
extract process as an instance of subprocess.Popen. extract process as an instance of subprocess.Popen.
''' '''
umask = storage_config.get('umask', None) umask = config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
if progress and extract_to_stdout: if progress and extract_to_stdout:
raise ValueError('progress and extract_to_stdout cannot both be set') raise ValueError('progress and extract_to_stdout cannot both be set')
if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version):
numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else ()
else: else:
numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else ()
if strip_components == 'all': if strip_components == 'all':
if not paths: if not paths:
@ -127,7 +126,7 @@ def extract_archive(
+ (tuple(paths) if paths else ()) + (tuple(paths) if paths else ())
) )
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
# The progress output isn't compatible with captured and logged output, as progress messes with # The progress output isn't compatible with captured and logged output, as progress messes with
# the terminal directly. # the terminal directly.

View file

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def display_archives_info( def display_archives_info(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
info_arguments, info_arguments,
global_arguments, global_arguments,
@ -17,12 +17,12 @@ def display_archives_info(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the local Borg version, global Given a local or remote repository path, a configuration dict, the local Borg version, global
arguments as an argparse.Namespace, and the arguments to the info action, display summary arguments as an argparse.Namespace, and the arguments to the info action, display summary
information for Borg archives in the repository or return JSON summary information. information for Borg archives in the repository or return JSON summary information.
''' '''
borgmatic.logger.add_custom_log_levels() borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
full_command = ( full_command = (
(local_path, 'info') (local_path, 'info')
@ -50,8 +50,8 @@ def display_archives_info(
flags.make_match_archives_flags( flags.make_match_archives_flags(
info_arguments.match_archives info_arguments.match_archives
or info_arguments.archive or info_arguments.archive
or storage_config.get('match_archives'), or config.get('match_archives'),
storage_config.get('archive_name_format'), config.get('archive_name_format'),
local_borg_version, local_borg_version,
) )
) )
@ -65,12 +65,12 @@ def display_archives_info(
if info_arguments.json: if info_arguments.json:
return execute_command_and_capture_output( return execute_command_and_capture_output(
full_command, full_command,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )
else: else:
execute_command( execute_command(
full_command, full_command,
output_log_level=logging.ANSWER, output_log_level=logging.ANSWER,
borg_local_path=local_path, borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )

View file

@ -21,7 +21,7 @@ MAKE_FLAGS_EXCLUDES = (
def make_list_command( def make_list_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
list_arguments, list_arguments,
global_arguments, global_arguments,
@ -29,11 +29,11 @@ def make_list_command(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the arguments to the list Given a local or remote repository path, a configuration dict, the arguments to the list action,
action, and local and remote Borg paths, return a command as a tuple to list archives or paths and local and remote Borg paths, return a command as a tuple to list archives or paths within an
within an archive. archive.
''' '''
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
return ( return (
(local_path, 'list') (local_path, 'list')
@ -89,7 +89,7 @@ def make_find_paths(find_paths):
def capture_archive_listing( def capture_archive_listing(
repository_path, repository_path,
archive, archive,
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
list_path=None, list_path=None,
@ -97,18 +97,18 @@ def capture_archive_listing(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg Given a local or remote repository path, an archive name, a configuration dict, the local Borg
version, global arguments as an argparse.Namespace, the archive path in which to list files, and version, global arguments as an argparse.Namespace, the archive path in which to list files, and
local and remote Borg paths, capture the output of listing that archive and return it as a list local and remote Borg paths, capture the output of listing that archive and return it as a list
of file paths. of file paths.
''' '''
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
return tuple( return tuple(
execute_command_and_capture_output( execute_command_and_capture_output(
make_list_command( make_list_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
argparse.Namespace( argparse.Namespace(
repository=repository_path, repository=repository_path,
@ -131,7 +131,7 @@ def capture_archive_listing(
def list_archive( def list_archive(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
list_arguments, list_arguments,
global_arguments, global_arguments,
@ -139,7 +139,7 @@ def list_archive(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the local Borg version, global Given a local or remote repository path, a configuration dict, the local Borg version, global
arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace, arguments as an argparse.Namespace, the arguments to the list action as an argparse.Namespace,
and local and remote Borg paths, display the output of listing the files of a Borg archive (or and local and remote Borg paths, display the output of listing the files of a Borg archive (or
return JSON output). If list_arguments.find_paths are given, list the files by searching across return JSON output). If list_arguments.find_paths are given, list the files by searching across
@ -167,7 +167,7 @@ def list_archive(
) )
return rlist.list_repository( return rlist.list_repository(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rlist_arguments, rlist_arguments,
global_arguments, global_arguments,
@ -187,7 +187,7 @@ def list_archive(
'The --json flag on the list action is not supported when using the --archive/--find flags.' 'The --json flag on the list action is not supported when using the --archive/--find flags.'
) )
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
# If there are any paths to find (and there's not a single archive already selected), start by # If there are any paths to find (and there's not a single archive already selected), start by
# getting a list of archives to search. # getting a list of archives to search.
@ -209,7 +209,7 @@ def list_archive(
execute_command_and_capture_output( execute_command_and_capture_output(
rlist.make_rlist_command( rlist.make_rlist_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rlist_arguments, rlist_arguments,
global_arguments, global_arguments,
@ -238,7 +238,7 @@ def list_archive(
main_command = make_list_command( main_command = make_list_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
archive_arguments, archive_arguments,
global_arguments, global_arguments,

View file

@ -10,7 +10,7 @@ def mount_archive(
repository_path, repository_path,
archive, archive,
mount_arguments, mount_arguments,
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path='borg', local_path='borg',
@ -22,8 +22,8 @@ def mount_archive(
dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional
local and remote Borg paths, mount the archive onto the mount point. local and remote Borg paths, mount the archive onto the mount point.
''' '''
umask = storage_config.get('umask', None) umask = config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
full_command = ( full_command = (
(local_path, 'mount') (local_path, 'mount')
@ -58,7 +58,7 @@ def mount_archive(
+ (tuple(mount_arguments.paths) if mount_arguments.paths else ()) + (tuple(mount_arguments.paths) if mount_arguments.paths else ())
) )
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
# Don't capture the output when foreground mode is used so that ctrl-C can work properly. # Don't capture the output when foreground mode is used so that ctrl-C can work properly.
if mount_arguments.foreground: if mount_arguments.foreground:

View file

@ -7,9 +7,9 @@ from borgmatic.execute import execute_command
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_prune_flags(storage_config, retention_config, local_borg_version): def make_prune_flags(config, local_borg_version):
''' '''
Given a retention config dict mapping from option name to value, transform it into a sequence of Given a configuration dict mapping from option name to value, transform it into a sequence of
command-line flags. command-line flags.
For example, given a retention config of: For example, given a retention config of:
@ -23,12 +23,12 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
('--keep-monthly', '6'), ('--keep-monthly', '6'),
) )
''' '''
config = retention_config.copy()
prefix = config.pop('prefix', None)
flag_pairs = ( flag_pairs = (
('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items() ('--' + option_name.replace('_', '-'), str(value))
for option_name, value in config.items()
if option_name.startswith('keep_')
) )
prefix = config.get('prefix')
return tuple(element for pair in flag_pairs for element in pair) + ( return tuple(element for pair in flag_pairs for element in pair) + (
( (
@ -39,8 +39,8 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
if prefix if prefix
else ( else (
flags.make_match_archives_flags( flags.make_match_archives_flags(
storage_config.get('match_archives'), config.get('match_archives'),
storage_config.get('archive_name_format'), config.get('archive_name_format'),
local_borg_version, local_borg_version,
) )
) )
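
With the separate retention dict gone, the rewritten make_prune_flags() filters the flat configuration for keep_* options and turns each one into a --keep-* flag. A self-contained sketch of just that filtering step:

    def keep_flags(config):
        # Only retention-related options ('keep_daily', 'keep_monthly', ...)
        # become --keep-* flags; everything else in the flat config is ignored.
        flag_pairs = (
            ('--' + option_name.replace('_', '-'), str(value))
            for option_name, value in config.items()
            if option_name.startswith('keep_')
        )
        return tuple(element for pair in flag_pairs for element in pair)

    print(keep_flags({'keep_daily': 1, 'keep_weekly': 2, 'compression': 'lz4'}))
    # ('--keep-daily', '1', '--keep-weekly', '2')
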
@ -50,8 +50,7 @@ def make_prune_flags(storage_config, retention_config, local_borg_version):
def prune_archives( def prune_archives(
dry_run, dry_run,
repository_path, repository_path,
storage_config, config,
retention_config,
local_borg_version, local_borg_version,
prune_arguments, prune_arguments,
global_arguments, global_arguments,
@ -59,18 +58,17 @@ def prune_archives(
remote_path=None, remote_path=None,
): ):
''' '''
Given a dry-run flag, a local or remote repository path, a storage config dict, and a Given a dry-run flag, a local or remote repository path, and a configuration dict, prune Borg
retention config dict, prune Borg archives according to the retention policy specified in that archives according to the retention policy specified in that configuration.
configuration.
''' '''
borgmatic.logger.add_custom_log_levels() borgmatic.logger.add_custom_log_levels()
umask = storage_config.get('umask', None) umask = config.get('umask', None)
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '') extra_borg_options = config.get('extra_borg_options', {}).get('prune', '')
full_command = ( full_command = (
(local_path, 'prune') (local_path, 'prune')
+ make_prune_flags(storage_config, retention_config, local_borg_version) + make_prune_flags(config, local_borg_version)
+ (('--remote-path', remote_path) if remote_path else ()) + (('--remote-path', remote_path) if remote_path else ())
+ (('--umask', str(umask)) if umask else ()) + (('--umask', str(umask)) if umask else ())
+ (('--log-json',) if global_arguments.log_json else ()) + (('--log-json',) if global_arguments.log_json else ())
@ -97,5 +95,5 @@ def prune_archives(
full_command, full_command,
output_log_level=output_log_level, output_log_level=output_log_level,
borg_local_path=local_path, borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )

View file

@ -14,7 +14,7 @@ RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2
def create_repository( def create_repository(
dry_run, dry_run,
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
encryption_mode, encryption_mode,
@ -27,15 +27,15 @@ def create_repository(
remote_path=None, remote_path=None,
): ):
''' '''
Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
Borg version, a Borg encryption mode, the path to another repo whose key material should be version, a Borg encryption mode, the path to another repo whose key material should be reused,
reused, whether the repository should be append-only, and the storage quota to use, create the whether the repository should be append-only, and the storage quota to use, create the
repository. If the repository already exists, then log and skip creation. repository. If the repository already exists, then log and skip creation.
''' '''
try: try:
rinfo.display_repository_info( rinfo.display_repository_info(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
argparse.Namespace(json=True), argparse.Namespace(json=True),
global_arguments, global_arguments,
@ -48,8 +48,8 @@ def create_repository(
if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE: if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
raise raise
lock_wait = storage_config.get('lock_wait') lock_wait = config.get('lock_wait')
extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '') extra_borg_options = config.get('extra_borg_options', {}).get('rcreate', '')
rcreate_command = ( rcreate_command = (
(local_path,) (local_path,)
@ -82,5 +82,5 @@ def create_repository(
rcreate_command, rcreate_command,
output_file=DO_NOT_CAPTURE, output_file=DO_NOT_CAPTURE,
borg_local_path=local_path, borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )
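
create_repository() treats a successful rinfo probe as "repository already exists" and skips creation; only the dedicated "repository not found" exit code (2) means it is safe to create, and any other failure propagates. A minimal sketch of that control flow with the probe abstracted into a callable:

    from subprocess import CalledProcessError

    RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2

    def repository_exists(probe):
        # probe is any callable that runs "borg rinfo" and raises
        # CalledProcessError when it fails.
        try:
            probe()
            return True
        except CalledProcessError as error:
            if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE:
                raise  # Some other problem: don't silently create over it.
            return False

    print(repository_exists(lambda: None))  # True: the probe "succeeded".
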

View file

@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
def display_repository_info( def display_repository_info(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rinfo_arguments, rinfo_arguments,
global_arguments, global_arguments,
@ -17,12 +17,12 @@ def display_repository_info(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the local Borg version, the Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary arguments to the rinfo action, and global arguments as an argparse.Namespace, display summary
information for the Borg repository or return JSON summary information. information for the Borg repository or return JSON summary information.
''' '''
borgmatic.logger.add_custom_log_levels() borgmatic.logger.add_custom_log_levels()
lock_wait = storage_config.get('lock_wait', None) lock_wait = config.get('lock_wait', None)
full_command = ( full_command = (
(local_path,) (local_path,)
@ -48,7 +48,7 @@ def display_repository_info(
+ flags.make_repository_flags(repository_path, local_borg_version) + flags.make_repository_flags(repository_path, local_borg_version)
) )
extra_environment = environment.make_environment(storage_config) extra_environment = environment.make_environment(config)
if rinfo_arguments.json: if rinfo_arguments.json:
return execute_command_and_capture_output( return execute_command_and_capture_output(

View file

@ -10,14 +10,14 @@ logger = logging.getLogger(__name__)
def resolve_archive_name( def resolve_archive_name(
repository_path, repository_path,
archive, archive,
storage_config, config,
local_borg_version, local_borg_version,
global_arguments, global_arguments,
local_path='borg', local_path='borg',
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg Given a local or remote repository path, an archive name, a configuration dict, the local Borg
version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path, version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path,
return the archive name. But if the archive name is "latest", then instead introspect the return the archive name. But if the archive name is "latest", then instead introspect the
repository for the latest archive and return its name. repository for the latest archive and return its name.
@ -34,7 +34,7 @@ def resolve_archive_name(
) )
+ flags.make_flags('remote-path', remote_path) + flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait')) + flags.make_flags('lock-wait', config.get('lock_wait'))
+ flags.make_flags('last', 1) + flags.make_flags('last', 1)
+ ('--short',) + ('--short',)
+ flags.make_repository_flags(repository_path, local_borg_version) + flags.make_repository_flags(repository_path, local_borg_version)
@ -42,7 +42,7 @@ def resolve_archive_name(
output = execute_command_and_capture_output( output = execute_command_and_capture_output(
full_command, full_command,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )
try: try:
latest_archive = output.strip().splitlines()[-1] latest_archive = output.strip().splitlines()[-1]
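
resolve_archive_name() only shells out when the requested archive is the literal "latest"; the newest archive name is simply the last line of the captured "--last 1 --short" listing. A small sketch of that parsing step, taking the captured output as its input:

    def parse_latest_archive(rlist_output):
        # rlist_output is the captured stdout of a "borg rlist --last 1 --short"
        # style command; the newest archive name is its final non-empty line.
        try:
            return rlist_output.strip().splitlines()[-1]
        except IndexError:
            raise ValueError('No archives found in the repository')

    print(parse_latest_archive('host-2023-07-08T23:14:30\n'))
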
@ -59,7 +59,7 @@ MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives')
def make_rlist_command( def make_rlist_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rlist_arguments, rlist_arguments,
global_arguments, global_arguments,
@ -67,7 +67,7 @@ def make_rlist_command(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the local Borg version, the Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and arguments to the rlist action, global arguments as an argparse.Namespace instance, and local and
remote Borg paths, return a command as a tuple to list archives with a repository. remote Borg paths, return a command as a tuple to list archives with a repository.
''' '''
@ -88,7 +88,7 @@ def make_rlist_command(
) )
+ flags.make_flags('remote-path', remote_path) + flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait')) + flags.make_flags('lock-wait', config.get('lock_wait'))
+ ( + (
( (
flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*') flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*')
@ -98,8 +98,8 @@ def make_rlist_command(
if rlist_arguments.prefix if rlist_arguments.prefix
else ( else (
flags.make_match_archives_flags( flags.make_match_archives_flags(
rlist_arguments.match_archives or storage_config.get('match_archives'), rlist_arguments.match_archives or config.get('match_archives'),
storage_config.get('archive_name_format'), config.get('archive_name_format'),
local_borg_version, local_borg_version,
) )
) )
@ -111,7 +111,7 @@ def make_rlist_command(
def list_repository( def list_repository(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rlist_arguments, rlist_arguments,
global_arguments, global_arguments,
@ -119,17 +119,17 @@ def list_repository(
remote_path=None, remote_path=None,
): ):
''' '''
Given a local or remote repository path, a storage config dict, the local Borg version, the Given a local or remote repository path, a configuration dict, the local Borg version, the
arguments to the list action, global arguments as an argparse.Namespace instance, and local and arguments to the list action, global arguments as an argparse.Namespace instance, and local and
remote Borg paths, display the output of listing Borg archives in the given repository (or remote Borg paths, display the output of listing Borg archives in the given repository (or
return JSON output). return JSON output).
''' '''
borgmatic.logger.add_custom_log_levels() borgmatic.logger.add_custom_log_levels()
borg_environment = environment.make_environment(storage_config) borg_environment = environment.make_environment(config)
main_command = make_rlist_command( main_command = make_rlist_command(
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
rlist_arguments, rlist_arguments,
global_arguments, global_arguments,

View file

@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
def transfer_archives( def transfer_archives(
dry_run, dry_run,
repository_path, repository_path,
storage_config, config,
local_borg_version, local_borg_version,
transfer_arguments, transfer_arguments,
global_arguments, global_arguments,
@ -18,7 +18,7 @@ def transfer_archives(
remote_path=None, remote_path=None,
): ):
''' '''
Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
version, the arguments to the transfer action, and global arguments as an argparse.Namespace version, the arguments to the transfer action, and global arguments as an argparse.Namespace
instance, transfer archives to the given repository. instance, transfer archives to the given repository.
''' '''
@ -30,7 +30,7 @@ def transfer_archives(
+ (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
+ flags.make_flags('remote-path', remote_path) + flags.make_flags('remote-path', remote_path)
+ flags.make_flags('log-json', global_arguments.log_json) + flags.make_flags('log-json', global_arguments.log_json)
+ flags.make_flags('lock-wait', storage_config.get('lock_wait', None)) + flags.make_flags('lock-wait', config.get('lock_wait', None))
+ ( + (
flags.make_flags_from_arguments( flags.make_flags_from_arguments(
transfer_arguments, transfer_arguments,
@ -40,8 +40,8 @@ def transfer_archives(
flags.make_match_archives_flags( flags.make_match_archives_flags(
transfer_arguments.match_archives transfer_arguments.match_archives
or transfer_arguments.archive or transfer_arguments.archive
or storage_config.get('match_archives'), or config.get('match_archives'),
storage_config.get('archive_name_format'), config.get('archive_name_format'),
local_borg_version, local_borg_version,
) )
) )
@ -56,5 +56,5 @@ def transfer_archives(
output_log_level=logging.ANSWER, output_log_level=logging.ANSWER,
output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None, output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None,
borg_local_path=local_path, borg_local_path=local_path,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )

View file

@ -6,9 +6,9 @@ from borgmatic.execute import execute_command_and_capture_output
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def local_borg_version(storage_config, local_path='borg'): def local_borg_version(config, local_path='borg'):
''' '''
Given a storage configuration dict and a local Borg binary path, return a version string for it. Given a configuration dict and a local Borg binary path, return a version string for it.
Raise OSError or CalledProcessError if there is a problem running Borg. Raise OSError or CalledProcessError if there is a problem running Borg.
Raise ValueError if the version cannot be parsed. Raise ValueError if the version cannot be parsed.
@ -20,7 +20,7 @@ def local_borg_version(storage_config, local_path='borg'):
) )
output = execute_command_and_capture_output( output = execute_command_and_capture_output(
full_command, full_command,
extra_environment=environment.make_environment(storage_config), extra_environment=environment.make_environment(config),
) )
try: try:

View file

@ -330,7 +330,7 @@ def make_parsers():
) )
global_group.add_argument( global_group.add_argument(
'--override', '--override',
metavar='SECTION.OPTION=VALUE', metavar='OPTION.SUBOPTION=VALUE',
nargs='+', nargs='+',
dest='overrides', dest='overrides',
action='extend', action='extend',

View file

@ -58,16 +58,12 @@ def run_configuration(config_filename, config, arguments):
* JSON output strings from successfully executing any actions that produce JSON * JSON output strings from successfully executing any actions that produce JSON
* logging.LogRecord instances containing errors from any actions or backup hooks that fail * logging.LogRecord instances containing errors from any actions or backup hooks that fail
''' '''
(location, storage, retention, consistency, hooks) = (
config.get(section_name, {})
for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
)
global_arguments = arguments['global'] global_arguments = arguments['global']
local_path = location.get('local_path', 'borg') local_path = config.get('local_path', 'borg')
remote_path = location.get('remote_path') remote_path = config.get('remote_path')
retries = storage.get('retries', 0) retries = config.get('retries', 0)
retry_wait = storage.get('retry_wait', 0) retry_wait = config.get('retry_wait', 0)
encountered_error = None encountered_error = None
error_repository = '' error_repository = ''
using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments) using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments)
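
This hunk is the core of #721: run_configuration() no longer unpacks location/storage/retention/consistency/hooks sub-dicts and instead reads every option from one flat dict (deprecated sectioned files are normalized elsewhere before reaching this point). A hedged sketch of the flattening idea, not the actual normalization code:

    LEGACY_SECTION_NAMES = ('location', 'storage', 'retention', 'consistency', 'hooks')

    def flatten_sections(config):
        # Lift every option out of the deprecated sections to the top level.
        flattened = {
            option_name: value
            for section_name in LEGACY_SECTION_NAMES
            for option_name, value in config.get(section_name, {}).items()
        }
        # Options already at the top level win over anything from a section.
        flattened.update(
            {key: value for key, value in config.items() if key not in LEGACY_SECTION_NAMES}
        )
        return flattened

    print(flatten_sections({'storage': {'retries': 2}, 'location': {'local_path': 'borg1'}}))
    # {'local_path': 'borg1', 'retries': 2}
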
@ -75,7 +71,7 @@ def run_configuration(config_filename, config, arguments):
monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED monitoring_hooks_are_activated = using_primary_action and monitoring_log_level != DISABLED
try: try:
local_borg_version = borg_version.local_borg_version(storage, local_path) local_borg_version = borg_version.local_borg_version(config, local_path)
except (OSError, CalledProcessError, ValueError) as error: except (OSError, CalledProcessError, ValueError) as error:
yield from log_error_records(f'{config_filename}: Error getting local Borg version', error) yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
return return
@ -84,7 +80,7 @@ def run_configuration(config_filename, config, arguments):
if monitoring_hooks_are_activated: if monitoring_hooks_are_activated:
dispatch.call_hooks( dispatch.call_hooks(
'initialize_monitor', 'initialize_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitoring_log_level, monitoring_log_level,
@ -93,7 +89,7 @@ def run_configuration(config_filename, config, arguments):
dispatch.call_hooks( dispatch.call_hooks(
'ping_monitor', 'ping_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitor.State.START, monitor.State.START,
@ -109,7 +105,7 @@ def run_configuration(config_filename, config, arguments):
if not encountered_error: if not encountered_error:
repo_queue = Queue() repo_queue = Queue()
for repo in location['repositories']: for repo in config['repositories']:
repo_queue.put( repo_queue.put(
(repo, 0), (repo, 0),
) )
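
The queue seeded above with (repo, 0) tuples drives per-repository retries: a failed repository is requeued with an incremented attempt count until the configured retries are exhausted, with retry_wait pauses between attempts. A simplified, self-contained sketch of that loop (the real logic also interleaves logging, monitoring hooks, and more error types):

    import time
    from queue import Queue

    def run_with_retries(repositories, run_one, retries=0, retry_wait=0):
        repo_queue = Queue()
        for repo in repositories:
            repo_queue.put((repo, 0))

        while not repo_queue.empty():
            repo, retry_num = repo_queue.get()
            if retry_num:
                time.sleep(retry_wait)  # Back off before a retried attempt.
            try:
                run_one(repo)
            except OSError:  # The real code also catches Borg/command errors.
                if retry_num < retries:
                    repo_queue.put((repo, retry_num + 1))
                else:
                    raise

    run_with_retries(['/mnt/backups/repo.borg'], run_one=print, retries=1)
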
@ -129,11 +125,7 @@ def run_configuration(config_filename, config, arguments):
yield from run_actions( yield from run_actions(
arguments=arguments, arguments=arguments,
config_filename=config_filename, config_filename=config_filename,
location=location, config=config,
storage=storage,
retention=retention,
consistency=consistency,
hooks=hooks,
local_path=local_path, local_path=local_path,
remote_path=remote_path, remote_path=remote_path,
local_borg_version=local_borg_version, local_borg_version=local_borg_version,
@ -172,7 +164,7 @@ def run_configuration(config_filename, config, arguments):
# send logs irrespective of error # send logs irrespective of error
dispatch.call_hooks( dispatch.call_hooks(
'ping_monitor', 'ping_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitor.State.LOG, monitor.State.LOG,
@ -191,7 +183,7 @@ def run_configuration(config_filename, config, arguments):
if monitoring_hooks_are_activated: if monitoring_hooks_are_activated:
dispatch.call_hooks( dispatch.call_hooks(
'ping_monitor', 'ping_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitor.State.FINISH, monitor.State.FINISH,
@ -200,7 +192,7 @@ def run_configuration(config_filename, config, arguments):
) )
dispatch.call_hooks( dispatch.call_hooks(
'destroy_monitor', 'destroy_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitoring_log_level, monitoring_log_level,
@ -216,8 +208,8 @@ def run_configuration(config_filename, config, arguments):
if encountered_error and using_primary_action: if encountered_error and using_primary_action:
try: try:
command.execute_hook( command.execute_hook(
hooks.get('on_error'), config.get('on_error'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'on-error', 'on-error',
global_arguments.dry_run, global_arguments.dry_run,
@ -227,7 +219,7 @@ def run_configuration(config_filename, config, arguments):
) )
dispatch.call_hooks( dispatch.call_hooks(
'ping_monitor', 'ping_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitor.State.FAIL, monitor.State.FAIL,
@ -236,7 +228,7 @@ def run_configuration(config_filename, config, arguments):
) )
dispatch.call_hooks( dispatch.call_hooks(
'destroy_monitor', 'destroy_monitor',
hooks, config,
config_filename, config_filename,
monitor.MONITOR_HOOK_NAMES, monitor.MONITOR_HOOK_NAMES,
monitoring_log_level, monitoring_log_level,
@ -253,11 +245,7 @@ def run_actions(
*, *,
arguments, arguments,
config_filename, config_filename,
location, config,
storage,
retention,
consistency,
hooks,
local_path, local_path,
remote_path, remote_path,
local_borg_version, local_borg_version,
@ -282,13 +270,13 @@ def run_actions(
hook_context = { hook_context = {
'repository': repository_path, 'repository': repository_path,
# Deprecated: For backwards compatibility with borgmatic < 1.6.0. # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
'repositories': ','.join([repo['path'] for repo in location['repositories']]), 'repositories': ','.join([repo['path'] for repo in config['repositories']]),
'log_file': global_arguments.log_file if global_arguments.log_file else '', 'log_file': global_arguments.log_file if global_arguments.log_file else '',
} }
command.execute_hook( command.execute_hook(
hooks.get('before_actions'), config.get('before_actions'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'pre-actions', 'pre-actions',
global_arguments.dry_run, global_arguments.dry_run,
@ -299,7 +287,7 @@ def run_actions(
if action_name == 'rcreate': if action_name == 'rcreate':
borgmatic.actions.rcreate.run_rcreate( borgmatic.actions.rcreate.run_rcreate(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -309,7 +297,7 @@ def run_actions(
elif action_name == 'transfer': elif action_name == 'transfer':
borgmatic.actions.transfer.run_transfer( borgmatic.actions.transfer.run_transfer(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -320,9 +308,7 @@ def run_actions(
yield from borgmatic.actions.create.run_create( yield from borgmatic.actions.create.run_create(
config_filename, config_filename,
repository, repository,
location, config,
storage,
hooks,
hook_context, hook_context,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
@ -335,9 +321,7 @@ def run_actions(
borgmatic.actions.prune.run_prune( borgmatic.actions.prune.run_prune(
config_filename, config_filename,
repository, repository,
storage, config,
retention,
hooks,
hook_context, hook_context,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
@ -350,9 +334,7 @@ def run_actions(
borgmatic.actions.compact.run_compact( borgmatic.actions.compact.run_compact(
config_filename, config_filename,
repository, repository,
storage, config,
retention,
hooks,
hook_context, hook_context,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
@ -362,14 +344,11 @@ def run_actions(
remote_path, remote_path,
) )
elif action_name == 'check': elif action_name == 'check':
if checks.repository_enabled_for_checks(repository, consistency): if checks.repository_enabled_for_checks(repository, config):
borgmatic.actions.check.run_check( borgmatic.actions.check.run_check(
config_filename, config_filename,
repository, repository,
location, config,
storage,
consistency,
hooks,
hook_context, hook_context,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
@ -381,9 +360,7 @@ def run_actions(
borgmatic.actions.extract.run_extract( borgmatic.actions.extract.run_extract(
config_filename, config_filename,
repository, repository,
location, config,
storage,
hooks,
hook_context, hook_context,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
@ -394,7 +371,7 @@ def run_actions(
elif action_name == 'export-tar': elif action_name == 'export-tar':
borgmatic.actions.export_tar.run_export_tar( borgmatic.actions.export_tar.run_export_tar(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -404,7 +381,7 @@ def run_actions(
elif action_name == 'mount': elif action_name == 'mount':
borgmatic.actions.mount.run_mount( borgmatic.actions.mount.run_mount(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -414,9 +391,7 @@ def run_actions(
elif action_name == 'restore': elif action_name == 'restore':
borgmatic.actions.restore.run_restore( borgmatic.actions.restore.run_restore(
repository, repository,
location, config,
storage,
hooks,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -426,7 +401,7 @@ def run_actions(
elif action_name == 'rlist': elif action_name == 'rlist':
yield from borgmatic.actions.rlist.run_rlist( yield from borgmatic.actions.rlist.run_rlist(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -436,7 +411,7 @@ def run_actions(
elif action_name == 'list': elif action_name == 'list':
yield from borgmatic.actions.list.run_list( yield from borgmatic.actions.list.run_list(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -446,7 +421,7 @@ def run_actions(
elif action_name == 'rinfo': elif action_name == 'rinfo':
yield from borgmatic.actions.rinfo.run_rinfo( yield from borgmatic.actions.rinfo.run_rinfo(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -456,7 +431,7 @@ def run_actions(
elif action_name == 'info': elif action_name == 'info':
yield from borgmatic.actions.info.run_info( yield from borgmatic.actions.info.run_info(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -466,7 +441,7 @@ def run_actions(
elif action_name == 'break-lock': elif action_name == 'break-lock':
borgmatic.actions.break_lock.run_break_lock( borgmatic.actions.break_lock.run_break_lock(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -476,7 +451,7 @@ def run_actions(
elif action_name == 'borg': elif action_name == 'borg':
borgmatic.actions.borg.run_borg( borgmatic.actions.borg.run_borg(
repository, repository,
storage, config,
local_borg_version, local_borg_version,
action_arguments, action_arguments,
global_arguments, global_arguments,
@ -485,8 +460,8 @@ def run_actions(
) )
command.execute_hook( command.execute_hook(
hooks.get('after_actions'), config.get('after_actions'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'post-actions', 'post-actions',
global_arguments.dry_run, global_arguments.dry_run,
@ -613,7 +588,7 @@ def get_local_path(configs):
Arbitrarily return the local path from the first configuration dict. Default to "borg" if not Arbitrarily return the local path from the first configuration dict. Default to "borg" if not
set. set.
''' '''
return next(iter(configs.values())).get('location', {}).get('local_path', 'borg') return next(iter(configs.values())).get('local_path', 'borg')
def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors): def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors):
@ -627,6 +602,8 @@ def collect_highlander_action_summary_logs(configs, arguments, configuration_par
A highlander action is an action that cannot coexist with other actions on the borgmatic A highlander action is an action that cannot coexist with other actions on the borgmatic
command-line, and borgmatic exits after processing such an action. command-line, and borgmatic exits after processing such an action.
''' '''
add_custom_log_levels()
if 'bootstrap' in arguments: if 'bootstrap' in arguments:
try: try:
# No configuration file is needed for bootstrap. # No configuration file is needed for bootstrap.
@ -744,10 +721,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
if 'create' in arguments: if 'create' in arguments:
try: try:
for config_filename, config in configs.items(): for config_filename, config in configs.items():
hooks = config.get('hooks', {})
command.execute_hook( command.execute_hook(
hooks.get('before_everything'), config.get('before_everything'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'pre-everything', 'pre-everything',
arguments['global'].dry_run, arguments['global'].dry_run,
@ -792,10 +768,9 @@ def collect_configuration_run_summary_logs(configs, arguments):
if 'create' in arguments: if 'create' in arguments:
try: try:
for config_filename, config in configs.items(): for config_filename, config in configs.items():
hooks = config.get('hooks', {})
command.execute_hook( command.execute_hook(
hooks.get('after_everything'), config.get('after_everything'),
hooks.get('umask'), config.get('umask'),
config_filename, config_filename,
'post-everything', 'post-everything',
arguments['global'].dry_run, arguments['global'].dry_run,

View file

@ -11,7 +11,7 @@ INDENT = 4
SEQUENCE_INDENT = 2 SEQUENCE_INDENT = 2
def _insert_newline_before_comment(config, field_name): def insert_newline_before_comment(config, field_name):
''' '''
Using some ruamel.yaml black magic, insert a blank line in the config right before the given Using some ruamel.yaml black magic, insert a blank line in the config right before the given
field and its comments. field and its comments.
@ -21,10 +21,10 @@ def _insert_newline_before_comment(config, field_name):
) )
def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): def schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
''' '''
Given a loaded configuration schema, generate and return sample config for it. Include comments Given a loaded configuration schema, generate and return sample config for it. Include comments
for each section based on the schema "description". for each option based on the schema "description".
''' '''
schema_type = schema.get('type') schema_type = schema.get('type')
example = schema.get('example') example = schema.get('example')
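
schema_to_sample_configuration() recurses over the configuration schema: arrays recurse into "items", objects recurse into each property, and scalars fall back to the schema's "example" value. A stripped-down sketch using plain dicts and lists in place of ruamel.yaml's commented containers:

    def sample_from_schema(schema):
        schema_type = schema.get('type')
        if schema_type == 'array':
            return [sample_from_schema(schema['items'])]
        if schema_type == 'object':
            return {
                field_name: sample_from_schema(sub_schema)
                for field_name, sub_schema in schema.get('properties', {}).items()
            }
        return schema.get('example')

    schema = {
        'type': 'object',
        'properties': {
            'keep_daily': {'type': 'integer', 'example': 7},
            'repositories': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {'path': {'type': 'string', 'example': 'ssh://user@host/./repo'}},
                },
            },
        },
    }
    print(sample_from_schema(schema))
    # {'keep_daily': 7, 'repositories': [{'path': 'ssh://user@host/./repo'}]}
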
@ -33,13 +33,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
if schema_type == 'array': if schema_type == 'array':
config = yaml.comments.CommentedSeq( config = yaml.comments.CommentedSeq(
[_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)] [schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)]
) )
add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT)) add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT))
elif schema_type == 'object': elif schema_type == 'object':
config = yaml.comments.CommentedMap( config = yaml.comments.CommentedMap(
[ [
(field_name, _schema_to_sample_configuration(sub_schema, level + 1)) (field_name, schema_to_sample_configuration(sub_schema, level + 1))
for field_name, sub_schema in schema['properties'].items() for field_name, sub_schema in schema['properties'].items()
] ]
) )
@ -53,13 +53,13 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
return config return config
def _comment_out_line(line): def comment_out_line(line):
# If it's already commented out (or empty), there's nothing further to do! # If it's already commented out (or empty), there's nothing further to do!
stripped_line = line.lstrip() stripped_line = line.lstrip()
if not stripped_line or stripped_line.startswith('#'): if not stripped_line or stripped_line.startswith('#'):
return line return line
# Comment out the names of optional sections, inserting the '#' after any indent for aesthetics. # Comment out the names of optional keys, inserting the '#' after any indent for aesthetics.
matches = re.match(r'(\s*)', line) matches = re.match(r'(\s*)', line)
indent_spaces = matches.group(0) if matches else '' indent_spaces = matches.group(0) if matches else ''
count_indent_spaces = len(indent_spaces) count_indent_spaces = len(indent_spaces)
@ -67,7 +67,7 @@ def _comment_out_line(line):
return '# '.join((indent_spaces, line[count_indent_spaces:])) return '# '.join((indent_spaces, line[count_indent_spaces:]))
def _comment_out_optional_configuration(rendered_config): def comment_out_optional_configuration(rendered_config):
''' '''
Post-process a rendered configuration string to comment out optional key/values, as determined Post-process a rendered configuration string to comment out optional key/values, as determined
by a sentinel in the comment before each key. by a sentinel in the comment before each key.
@ -92,7 +92,7 @@ def _comment_out_optional_configuration(rendered_config):
if not line.strip(): if not line.strip():
optional = False optional = False
lines.append(_comment_out_line(line) if optional else line) lines.append(comment_out_line(line) if optional else line)
return '\n'.join(lines) return '\n'.join(lines)
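
comment_out_optional_configuration() is a small line-by-line state machine over the rendered YAML: a COMMENT_OUT sentinel comment (added for every key outside REQUIRED_KEYS) switches commenting on, and a blank line switches it back off. A compact sketch of the same idea over plain text, with the sentinel handling simplified:

    COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'

    def comment_out_optional(rendered_config):
        lines = []
        optional = False
        for line in rendered_config.splitlines():
            if line.strip() == f'# {COMMENTED_OUT_SENTINEL}':
                optional = True
                continue  # Drop the sentinel itself from the output.
            if not line.strip():
                optional = False
            lines.append('# ' + line if optional else line)
        return '\n'.join(lines)

    sample = '# COMMENT_OUT\ncompression: lz4\n\nrepositories:\n    - path: /repo'
    print(comment_out_optional(sample))
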
@ -165,7 +165,6 @@ def add_comments_to_configuration_sequence(config, schema, indent=0):
return return
REQUIRED_SECTION_NAMES = {'location', 'retention'}
REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'} REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'}
COMMENTED_OUT_SENTINEL = 'COMMENT_OUT' COMMENTED_OUT_SENTINEL = 'COMMENT_OUT'
@ -185,7 +184,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
# If this is an optional key, add an indicator to the comment flagging it to be commented # If this is an optional key, add an indicator to the comment flagging it to be commented
# out from the sample configuration. This sentinel is consumed by downstream processing that # out from the sample configuration. This sentinel is consumed by downstream processing that
# does the actual commenting out. # does the actual commenting out.
if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS: if field_name not in REQUIRED_KEYS:
description = ( description = (
'\n'.join((description, COMMENTED_OUT_SENTINEL)) '\n'.join((description, COMMENTED_OUT_SENTINEL))
if description if description
@ -199,7 +198,7 @@ def add_comments_to_configuration_object(config, schema, indent=0, skip_first=Fa
config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent) config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)
if index > 0: if index > 0:
_insert_newline_before_comment(config, field_name) insert_newline_before_comment(config, field_name)
RUAMEL_YAML_COMMENTS_INDEX = 1 RUAMEL_YAML_COMMENTS_INDEX = 1
@ -284,7 +283,7 @@ def generate_sample_configuration(
normalize.normalize(source_filename, source_config) normalize.normalize(source_filename, source_config)
destination_config = merge_source_configuration_into_destination( destination_config = merge_source_configuration_into_destination(
_schema_to_sample_configuration(schema), source_config schema_to_sample_configuration(schema), source_config
) )
if dry_run: if dry_run:
@ -292,6 +291,6 @@ def generate_sample_configuration(
write_configuration( write_configuration(
destination_filename, destination_filename,
_comment_out_optional_configuration(render_configuration(destination_config)), comment_out_optional_configuration(render_configuration(destination_config)),
overwrite=overwrite, overwrite=overwrite,
) )


@ -97,8 +97,8 @@ class Include_constructor(ruamel.yaml.SafeConstructor):
``` ```
These includes are deep merged into the current configuration file. For instance, in this These includes are deep merged into the current configuration file. For instance, in this
example, any "retention" options in common.yaml will get merged into the "retention" section example, any "option" with sub-options in common.yaml will get merged into the corresponding
in the example configuration file. "option" with sub-options in the example configuration file.
''' '''
representer = ruamel.yaml.representer.SafeRepresenter() representer = ruamel.yaml.representer.SafeRepresenter()
@ -116,7 +116,7 @@ def load_configuration(filename):
''' '''
Load the given configuration file and return its contents as a data structure of nested dicts Load the given configuration file and return its contents as a data structure of nested dicts
and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the and lists. Also, replace any "{constant}" strings with the value of the "constant" key in the
"constants" section of the configuration file. "constants" option of the configuration file.
Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError
if there are too many recursive includes. if there are too many recursive includes.
@ -223,8 +223,8 @@ def deep_merge_nodes(nodes):
If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged. If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged.
The purpose of deep merging like this is to support, for instance, merging one borgmatic The purpose of deep merging like this is to support, for instance, merging one borgmatic
configuration file into another for reuse, such that a configuration section ("retention", configuration file into another for reuse, such that a configuration option with sub-options
etc.) does not completely replace the corresponding section in a merged file. does not completely replace the corresponding option in a merged file.
Raise ValueError if a merge is implied using two incompatible types. Raise ValueError if a merge is implied using two incompatible types.
''' '''

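The merge behavior described in these docstrings operates on ruamel.yaml parse nodes, with special handling for sequences and the "!retain" tag, but its effect on plain data is easiest to see with a toy recursive dict merge. The sketch below is conceptual only and the option names are illustrative; it is not borgmatic's deep_merge_nodes():

```python
def deep_merge(destination, source):
    # Options that are themselves mappings get merged key by key; everything
    # else from the source simply wins. (The real merger also handles lists
    # and the "!retain" tag, which this sketch ignores.)
    merged = dict(destination)
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged

common = {'compression': 'lz4', 'ntfy': {'topic': 'backups', 'states': ['fail']}}
local = {'keep_daily': 7, 'ntfy': {'topic': 'prod-backups'}}

# "ntfy" keeps "states" from common while "topic" is overridden; with sections
# gone, this merging now happens directly at the top level of the file.
print(deep_merge(common, local))
```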

@ -2,21 +2,70 @@ import logging
import os import os
def normalize_sections(config_filename, config):
'''
Given a configuration filename and a configuration dict of its loaded contents, airlift any
options out of sections ("location:", etc.) to the global scope and delete those sections.
Return any log message warnings produced based on the normalization performed.
Raise ValueError if the "prefix" option is set to different values in the "retention" and "consistency" sections, or if the "umask" option is set to different values in the "storage" and "hooks" sections.
'''
storage = config.get('storage') or {}
retention = config.get('retention') or {}
consistency = config.get('consistency') or {}
hooks = config.get('hooks') or {}
if (
retention.get('prefix')
and consistency.get('prefix')
and retention.get('prefix') != consistency.get('prefix')
):
raise ValueError(
'The retention prefix and the consistency prefix cannot have different values (unless one is not set).'
)
if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'):
raise ValueError(
'The storage umask and the hooks umask cannot have different values (unless one is not set).'
)
any_section_upgraded = False
# Move any options from deprecated sections into the global scope.
for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
section_config = config.get(section_name)
if section_config:
any_section_upgraded = True
del config[section_name]
config.update(section_config)
if any_section_upgraded:
return [
logging.makeLogRecord(
dict(
levelno=logging.WARNING,
levelname='WARNING',
msg=f'{config_filename}: Configuration sections like location: and storage: are deprecated and support will be removed from a future release. Move all of your options out of sections to the global scope.',
)
)
]
return []
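To make the effect of normalize_sections() concrete, here is a hypothetical before/after call. The module path borgmatic.config.normalize and the sample option values are assumptions for illustration:

```python
from borgmatic.config.normalize import normalize_sections

config = {
    'location': {'source_directories': ['/home'], 'repositories': [{'path': 'repo.borg'}]},
    'retention': {'keep_daily': 7},
    'hooks': {'postgresql_databases': [{'name': 'users'}]},
}
logs = normalize_sections('config.yaml', config)

# The sections are gone and their options now live at the top level.
assert config == {
    'source_directories': ['/home'],
    'repositories': [{'path': 'repo.borg'}],
    'keep_daily': 7,
    'postgresql_databases': [{'name': 'users'}],
}
assert len(logs) == 1  # A single WARNING log record about the deprecated sections.
```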
def normalize(config_filename, config): def normalize(config_filename, config):
''' '''
Given a configuration filename and a configuration dict of its loaded contents, apply particular Given a configuration filename and a configuration dict of its loaded contents, apply particular
hard-coded rules to normalize the configuration to adhere to the current schema. Return any log hard-coded rules to normalize the configuration to adhere to the current schema. Return any log
message warnings produced based on the normalization performed. message warnings produced based on the normalization performed.
Raise ValueError if the configuration cannot be normalized.
''' '''
logs = [] logs = normalize_sections(config_filename, config)
location = config.get('location') or {}
storage = config.get('storage') or {}
consistency = config.get('consistency') or {}
retention = config.get('retention') or {}
hooks = config.get('hooks') or {}
# Upgrade exclude_if_present from a string to a list. # Upgrade exclude_if_present from a string to a list.
exclude_if_present = location.get('exclude_if_present') exclude_if_present = config.get('exclude_if_present')
if isinstance(exclude_if_present, str): if isinstance(exclude_if_present, str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -27,10 +76,10 @@ def normalize(config_filename, config):
) )
) )
) )
config['location']['exclude_if_present'] = [exclude_if_present] config['exclude_if_present'] = [exclude_if_present]
# Upgrade various monitoring hooks from a string to a dict. # Upgrade various monitoring hooks from a string to a dict.
healthchecks = hooks.get('healthchecks') healthchecks = config.get('healthchecks')
if isinstance(healthchecks, str): if isinstance(healthchecks, str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -41,9 +90,9 @@ def normalize(config_filename, config):
) )
) )
) )
config['hooks']['healthchecks'] = {'ping_url': healthchecks} config['healthchecks'] = {'ping_url': healthchecks}
cronitor = hooks.get('cronitor') cronitor = config.get('cronitor')
if isinstance(cronitor, str): if isinstance(cronitor, str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -54,9 +103,9 @@ def normalize(config_filename, config):
) )
) )
) )
config['hooks']['cronitor'] = {'ping_url': cronitor} config['cronitor'] = {'ping_url': cronitor}
pagerduty = hooks.get('pagerduty') pagerduty = config.get('pagerduty')
if isinstance(pagerduty, str): if isinstance(pagerduty, str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -67,9 +116,9 @@ def normalize(config_filename, config):
) )
) )
) )
config['hooks']['pagerduty'] = {'integration_key': pagerduty} config['pagerduty'] = {'integration_key': pagerduty}
cronhub = hooks.get('cronhub') cronhub = config.get('cronhub')
if isinstance(cronhub, str): if isinstance(cronhub, str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -80,10 +129,10 @@ def normalize(config_filename, config):
) )
) )
) )
config['hooks']['cronhub'] = {'ping_url': cronhub} config['cronhub'] = {'ping_url': cronhub}
# Upgrade consistency checks from a list of strings to a list of dicts. # Upgrade consistency checks from a list of strings to a list of dicts.
checks = consistency.get('checks') checks = config.get('checks')
if isinstance(checks, list) and len(checks) and isinstance(checks[0], str): if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -94,10 +143,10 @@ def normalize(config_filename, config):
) )
) )
) )
config['consistency']['checks'] = [{'name': check_type} for check_type in checks] config['checks'] = [{'name': check_type} for check_type in checks]
# Rename various configuration options. # Rename various configuration options.
numeric_owner = location.pop('numeric_owner', None) numeric_owner = config.pop('numeric_owner', None)
if numeric_owner is not None: if numeric_owner is not None:
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -108,9 +157,9 @@ def normalize(config_filename, config):
) )
) )
) )
config['location']['numeric_ids'] = numeric_owner config['numeric_ids'] = numeric_owner
bsd_flags = location.pop('bsd_flags', None) bsd_flags = config.pop('bsd_flags', None)
if bsd_flags is not None: if bsd_flags is not None:
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -121,9 +170,9 @@ def normalize(config_filename, config):
) )
) )
) )
config['location']['flags'] = bsd_flags config['flags'] = bsd_flags
remote_rate_limit = storage.pop('remote_rate_limit', None) remote_rate_limit = config.pop('remote_rate_limit', None)
if remote_rate_limit is not None: if remote_rate_limit is not None:
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
@ -134,10 +183,10 @@ def normalize(config_filename, config):
) )
) )
) )
config['storage']['upload_rate_limit'] = remote_rate_limit config['upload_rate_limit'] = remote_rate_limit
# Upgrade remote repositories to ssh:// syntax, required in Borg 2. # Upgrade remote repositories to ssh:// syntax, required in Borg 2.
repositories = location.get('repositories') repositories = config.get('repositories')
if repositories: if repositories:
if isinstance(repositories[0], str): if isinstance(repositories[0], str):
logs.append( logs.append(
@ -149,11 +198,11 @@ def normalize(config_filename, config):
) )
) )
) )
config['location']['repositories'] = [ config['repositories'] = [{'path': repository} for repository in repositories]
{'path': repository} for repository in repositories repositories = config['repositories']
]
repositories = config['location']['repositories'] config['repositories'] = []
config['location']['repositories'] = []
for repository_dict in repositories: for repository_dict in repositories:
repository_path = repository_dict['path'] repository_path = repository_dict['path']
if '~' in repository_path: if '~' in repository_path:
@ -171,14 +220,14 @@ def normalize(config_filename, config):
updated_repository_path = os.path.abspath( updated_repository_path = os.path.abspath(
repository_path.partition('file://')[-1] repository_path.partition('file://')[-1]
) )
config['location']['repositories'].append( config['repositories'].append(
dict( dict(
repository_dict, repository_dict,
path=updated_repository_path, path=updated_repository_path,
) )
) )
elif repository_path.startswith('ssh://'): elif repository_path.startswith('ssh://'):
config['location']['repositories'].append(repository_dict) config['repositories'].append(repository_dict)
else: else:
rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}" rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"
logs.append( logs.append(
@ -190,16 +239,16 @@ def normalize(config_filename, config):
) )
) )
) )
config['location']['repositories'].append( config['repositories'].append(
dict( dict(
repository_dict, repository_dict,
path=rewritten_repository_path, path=rewritten_repository_path,
) )
) )
else: else:
config['location']['repositories'].append(repository_dict) config['repositories'].append(repository_dict)
if consistency.get('prefix') or retention.get('prefix'): if config.get('prefix'):
logs.append( logs.append(
logging.makeLogRecord( logging.makeLogRecord(
dict( dict(

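The repository rewrite in the hunk above upgrades old-style SCP-like remote paths to the ssh:// form that Borg 2 requires. Pulled out as a standalone expression, with made-up hostnames and paths, the transformation looks like this:

```python
def rewrite_remote_path(repository_path):
    # Same string surgery as in the normalization code above.
    return f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}"

print(rewrite_remote_path('user@backupserver:backups.borg'))
# ssh://user@backupserver/./backups.borg
print(rewrite_remote_path('user@backupserver:/var/lib/backups.borg'))
# ssh://user@backupserver/var/lib/backups.borg
print(rewrite_remote_path('user@backupserver:~/backups.borg'))
# ssh://user@backupserver/~/backups.borg
```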

@ -32,19 +32,33 @@ def convert_value_type(value):
return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value))
LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'}
def strip_section_names(parsed_override_key):
'''
Given a parsed override key as a tuple of option and suboption names, strip out any initial
legacy section names, since configuration file normalization also strips them out.
'''
if parsed_override_key[0] in LEGACY_SECTION_NAMES:
return parsed_override_key[1:]
return parsed_override_key
def parse_overrides(raw_overrides): def parse_overrides(raw_overrides):
''' '''
Given a sequence of configuration file override strings in the form of "section.option=value", Given a sequence of configuration file override strings in the form of "option.suboption=value",
parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For
instance, given the following raw overrides: instance, given the following raw overrides:
['section.my_option=value1', 'section.other_option=value2'] ['my_option.suboption=value1', 'other_option=value2']
... return this: ... return this:
( (
(('section', 'my_option'), 'value1'), (('my_option', 'suboption'), 'value1'),
(('section', 'other_option'), 'value2'), (('other_option',), 'value2'),
) )
Raise ValueError if an override can't be parsed. Raise ValueError if an override can't be parsed.
@ -59,13 +73,13 @@ def parse_overrides(raw_overrides):
raw_keys, value = raw_override.split('=', 1) raw_keys, value = raw_override.split('=', 1)
parsed_overrides.append( parsed_overrides.append(
( (
tuple(raw_keys.split('.')), strip_section_names(tuple(raw_keys.split('.'))),
convert_value_type(value), convert_value_type(value),
) )
) )
except ValueError: except ValueError:
raise ValueError( raise ValueError(
f"Invalid override '{raw_override}'. Make sure you use the form: SECTION.OPTION=VALUE" f"Invalid override '{raw_override}'. Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE"
) )
except ruamel.yaml.error.YAMLError as error: except ruamel.yaml.error.YAMLError as error:
raise ValueError(f"Invalid override '{raw_override}': {error.problem}") raise ValueError(f"Invalid override '{raw_override}': {error.problem}")
@ -76,7 +90,7 @@ def parse_overrides(raw_overrides):
def apply_overrides(config, raw_overrides): def apply_overrides(config, raw_overrides):
''' '''
Given a configuration dict and a sequence of configuration file override strings in the form of Given a configuration dict and a sequence of configuration file override strings in the form of
"section.option=value", parse each override and set it into the configuration dict. "option.suboption=value", parse each override and set it into the configuration dict.
''' '''
overrides = parse_overrides(raw_overrides) overrides = parse_overrides(raw_overrides)

File diff suppressed because it is too large


@ -71,18 +71,15 @@ def apply_logical_validation(config_filename, parsed_configuration):
below), run through any additional logical validation checks. If there are any such validation below), run through any additional logical validation checks. If there are any such validation
problems, raise a Validation_error. problems, raise a Validation_error.
''' '''
location_repositories = parsed_configuration.get('location', {}).get('repositories') repositories = parsed_configuration.get('repositories')
check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', []) check_repositories = parsed_configuration.get('check_repositories', [])
for repository in check_repositories: for repository in check_repositories:
if not any( if not any(
repositories_match(repository, config_repository) repositories_match(repository, config_repository) for config_repository in repositories
for config_repository in location_repositories
): ):
raise Validation_error( raise Validation_error(
config_filename, config_filename,
( (f'Unknown repository in "check_repositories": {repository}',),
f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
),
) )
@ -90,11 +87,15 @@ def parse_configuration(config_filename, schema_filename, overrides=None, resolv
''' '''
Given the path to a config filename in YAML format, the path to a schema filename in a YAML Given the path to a config filename in YAML format, the path to a schema filename in a YAML
rendition of JSON Schema format, a sequence of configuration file override strings in the form rendition of JSON Schema format, a sequence of configuration file override strings in the form
of "section.option=value", return the parsed configuration as a data structure of nested dicts of "option.suboption=value", return the parsed configuration as a data structure of nested dicts
and lists corresponding to the schema. Example return value: and lists corresponding to the schema. Example return value:
{'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'}, {
'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}} 'source_directories': ['/home', '/etc'],
'repository': 'hostname.borg',
'keep_daily': 7,
'checks': ['repository', 'archives'],
}
Also return a sequence of logging.LogRecord instances containing any warnings about the Also return a sequence of logging.LogRecord instances containing any warnings about the
configuration. configuration.
@ -174,7 +175,7 @@ def guard_configuration_contains_repository(repository, configurations):
tuple( tuple(
config_repository config_repository
for config in configurations.values() for config in configurations.values()
for config_repository in config['location']['repositories'] for config_repository in config['repositories']
if repositories_match(config_repository, repository) if repositories_match(config_repository, repository)
) )
) )
@ -198,7 +199,7 @@ def guard_single_repository_selected(repository, configurations):
tuple( tuple(
config_repository config_repository
for config in configurations.values() for config in configurations.values()
for config_repository in config['location']['repositories'] for config_repository in config['repositories']
) )
) )

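The flattened lookups above are easier to see in isolation. This sketch mimics apply_logical_validation()'s check with made-up repository values; repositories_match() is reduced here to plain path equality, which is a simplification of the real helper:

```python
def repository_path(repository):
    return repository.get('path') if isinstance(repository, dict) else repository

def repositories_match(first, second):
    # Simplified stand-in for borgmatic's helper: compare paths only.
    return repository_path(first) == repository_path(second)

parsed_configuration = {
    'repositories': [{'path': 'repo.borg'}],
    'check_repositories': ['other.borg'],
}

for repository in parsed_configuration.get('check_repositories', []):
    if not any(
        repositories_match(repository, config_repository)
        for config_repository in parsed_configuration.get('repositories')
    ):
        print(f'Unknown repository in "check_repositories": {repository}')
# Unknown repository in "check_repositories": other.borg
```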

@ -22,7 +22,7 @@ def initialize_monitor(
pass pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
''' '''
Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything. filename in any log entries. If this is a dry run, then don't actually ping anything.


@ -22,7 +22,7 @@ def initialize_monitor(
pass pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
''' '''
Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration
filename in any log entries. If this is a dry run, then don't actually ping anything. filename in any log entries. If this is a dry run, then don't actually ping anything.


@ -27,18 +27,17 @@ HOOK_NAME_TO_MODULE = {
} }
def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
''' '''
Given the hooks configuration dict and a prefix to use in log entries, call the requested Given a configuration dict and a prefix to use in log entries, call the requested function of
function of the Python module corresponding to the given hook name. Supply that call with the the Python module corresponding to the given hook name. Supply that call with the configuration
configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any for this hook (if any), the log prefix, and any given args and kwargs. Return any return value.
return value.
Raise ValueError if the hook name is unknown. Raise ValueError if the hook name is unknown.
Raise AttributeError if the function name is not found in the module. Raise AttributeError if the function name is not found in the module.
Raise anything else that the called function raises. Raise anything else that the called function raises.
''' '''
config = hooks.get(hook_name, {}) hook_config = config.get(hook_name, {})
try: try:
module = HOOK_NAME_TO_MODULE[hook_name] module = HOOK_NAME_TO_MODULE[hook_name]
@ -46,15 +45,15 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
raise ValueError(f'Unknown hook name: {hook_name}') raise ValueError(f'Unknown hook name: {hook_name}')
logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}') logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
return getattr(module, function_name)(config, log_prefix, *args, **kwargs) return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): def call_hooks(function_name, config, log_prefix, hook_names, *args, **kwargs):
''' '''
Given the hooks configuration dict and a prefix to use in log entries, call the requested Given a configuration dict and a prefix to use in log entries, call the requested function of
function of the Python module corresponding to each given hook name. Supply each call with the the Python module corresponding to each given hook name. Supply each call with the configuration
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
values into a dict from hook name to return value. dict from hook name to return value.
If the hook name is not present in the hooks configuration, then don't call the function for it If the hook name is not present in the hooks configuration, then don't call the function for it
and omit it from the return values. and omit it from the return values.
@ -64,23 +63,23 @@ def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
Raise anything else that a called function raises. An error stops calls to subsequent functions. Raise anything else that a called function raises. An error stops calls to subsequent functions.
''' '''
return { return {
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
for hook_name in hook_names for hook_name in hook_names
if hooks.get(hook_name) if config.get(hook_name)
} }
def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs): def call_hooks_even_if_unconfigured(function_name, config, log_prefix, hook_names, *args, **kwargs):
''' '''
Given the hooks configuration dict and a prefix to use in log entries, call the requested Given a configuration dict and a prefix to use in log entries, call the requested function of
function of the Python module corresponding to each given hook name. Supply each call with the the Python module corresponding to each given hook name. Supply each call with the configuration
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return for that hook, the log prefix, and any given args and kwargs. Collect any return values into a
values into a dict from hook name to return value. dict from hook name to return value.
Raise AttributeError if the function name is not found in the module. Raise AttributeError if the function name is not found in the module.
Raise anything else that a called function raises. An error stops calls to subsequent functions. Raise anything else that a called function raises. An error stops calls to subsequent functions.
''' '''
return { return {
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) hook_name: call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs)
for hook_name in hook_names for hook_name in hook_names
} }

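To see what the reworked dispatch signature buys, here is a self-contained sketch of the same pattern with a stand-in hook module. Nothing here is borgmatic's real Healthchecks hook; the URL and names are placeholders. The point is that callers now hand over the entire configuration dict, and call_hook() extracts the per-hook options itself, passing both along.

```python
import types

def fake_ping_monitor(hook_config, config, log_prefix, *args, **kwargs):
    # Hook functions now receive their own options plus the whole config dict.
    return f"{log_prefix}: would ping {hook_config['ping_url']}"

HOOK_NAME_TO_MODULE = {'healthchecks': types.SimpleNamespace(ping_monitor=fake_ping_monitor)}

def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
    hook_config = config.get(hook_name, {})
    module = HOOK_NAME_TO_MODULE[hook_name]
    return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)

config = {'healthchecks': {'ping_url': 'https://hc-ping.com/example-uuid'}}
print(call_hook('ping_monitor', config, 'config.yaml', 'healthchecks'))
# config.yaml: would ping https://hc-ping.com/example-uuid
```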

@ -90,7 +90,7 @@ def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_r
) )
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
''' '''
Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given
configuration filename in any log entries, and log to Healthchecks with the giving log level. configuration filename in any log entries, and log to Healthchecks with the giving log level.


@ -6,21 +6,20 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover def make_dump_path(config): # pragma: no cover
''' '''
Make the dump path from the given location configuration and the name of this hook. Make the dump path from the given configuration dict and the name of this hook.
''' '''
return dump.make_database_dump_path( return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'mongodb_databases' config.get('borgmatic_source_directory'), 'mongodb_databases'
) )
def dump_databases(databases, log_prefix, location_config, dry_run): def dump_databases(databases, config, log_prefix, dry_run):
''' '''
Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log dicts, one dict describing each database as per the configuration schema. Use the configuration
prefix in any log entries. Use the given location configuration dict to construct the dict to construct the destination path and the given log prefix in any log entries.
destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -33,7 +32,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
for database in databases: for database in databases:
name = database['name'] name = database['name']
dump_filename = dump.make_database_dump_filename( dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), name, database.get('hostname') make_dump_path(config), name, database.get('hostname')
) )
dump_format = database.get('format', 'archive') dump_format = database.get('format', 'archive')
@ -82,35 +81,33 @@ def build_dump_command(database, dump_filename, dump_format):
return command return command
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
''' '''
Remove all database dump files for this hook regardless of the given databases. Use the log Remove all database dump files for this hook regardless of the given databases. Use the log
prefix in any log entries. Use the given location configuration dict to construct the prefix in any log entries. Use the given configuration dict to construct the destination path.
destination path. If this is a dry run, then don't actually remove anything. If this is a dry run, then don't actually remove anything.
''' '''
dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run) dump.remove_database_dumps(make_dump_path(config), 'MongoDB', log_prefix, dry_run)
def make_database_dump_pattern( def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
databases, log_prefix, location_config, name=None
): # pragma: no cover
''' '''
Given a sequence of configuration dicts, a prefix to log with, a location configuration dict, Given a sequence of database configuration dicts, a configuration dict, a prefix to log with,
and a database name to match, return the corresponding glob patterns to match the database dump and a database name to match, return the corresponding glob patterns to match the database dump
in an archive. in an archive.
''' '''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump( def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params database_config, config, log_prefix, dry_run, extract_process, connection_params
): ):
''' '''
Restore the given MongoDB database from an extract stream. The database is supplied as a Restore the given MongoDB database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema. one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore Use the configuration dict to construct the destination path and the given log prefix in any log
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce entries. If this is a dry run, then don't actually restore anything. Trigger the given active
output to consume. extract process (an instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream. extract stream.
@ -122,7 +119,7 @@ def restore_database_dump(
database = database_config[0] database = database_config[0]
dump_filename = dump.make_database_dump_filename( dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname') make_dump_path(config), database['name'], database.get('hostname')
) )
restore_command = build_restore_command( restore_command = build_restore_command(
extract_process, database, dump_filename, connection_params extract_process, database, dump_filename, connection_params


@ -12,13 +12,11 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover def make_dump_path(config): # pragma: no cover
''' '''
Make the dump path from the given location configuration and the name of this hook. Make the dump path from the given configuration dict and the name of this hook.
''' '''
return dump.make_database_dump_path( return dump.make_database_dump_path(config.get('borgmatic_source_directory'), 'mysql_databases')
location_config.get('borgmatic_source_directory'), 'mysql_databases'
)
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys') SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
@ -106,12 +104,11 @@ def execute_dump_command(
) )
def dump_databases(databases, log_prefix, location_config, dry_run): def dump_databases(databases, config, log_prefix, dry_run):
''' '''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
of dicts, one dict describing each database as per the configuration schema. Use the given log of dicts, one dict describing each database as per the configuration schema. Use the given
prefix in any log entries. Use the given location configuration dict to construct the configuration dict to construct the destination path and the given log prefix in any log entries.
destination path.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -122,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}') logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
for database in databases: for database in databases:
dump_path = make_dump_path(location_config) dump_path = make_dump_path(config)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump( dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run database, extra_environment, log_prefix, dry_run
@ -165,28 +162,26 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return [process for process in processes if process] return [process for process in processes if process]
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
''' '''
Remove all database dump files for this hook regardless of the given databases. Use the log Remove all database dump files for this hook regardless of the given databases. Use the given
prefix in any log entries. Use the given location configuration dict to construct the configuration dict to construct the destination path and the log prefix in any log entries. If
destination path. If this is a dry run, then don't actually remove anything. this is a dry run, then don't actually remove anything.
''' '''
dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run) dump.remove_database_dumps(make_dump_path(config), 'MySQL', log_prefix, dry_run)
def make_database_dump_pattern( def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
databases, log_prefix, location_config, name=None
): # pragma: no cover
''' '''
Given a sequence of configuration dicts, a prefix to log with, a location configuration dict, Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a
and a database name to match, return the corresponding glob patterns to match the database dump database name to match, return the corresponding glob patterns to match the database dump in an
in an archive. archive.
''' '''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump( def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params database_config, config, log_prefix, dry_run, extract_process, connection_params
): ):
''' '''
Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a


@ -14,7 +14,7 @@ def initialize_monitor(
pass pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
''' '''
Ping the configured Ntfy topic. Use the given configuration filename in any log entries. Ping the configured Ntfy topic. Use the given configuration filename in any log entries.
If this is a dry run, then don't actually ping anything. If this is a dry run, then don't actually ping anything.


@ -21,7 +21,7 @@ def initialize_monitor(
pass pass
def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
''' '''
If this is an error state, create a PagerDuty event with the configured integration key. Use If this is an error state, create a PagerDuty event with the configured integration key. Use
the given configuration filename in any log entries. If this is a dry run, then don't actually the given configuration filename in any log entries. If this is a dry run, then don't actually


@ -14,12 +14,12 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover def make_dump_path(config): # pragma: no cover
''' '''
Make the dump path from the given location configuration and the name of this hook. Make the dump path from the given configuration dict and the name of this hook.
''' '''
return dump.make_database_dump_path( return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'postgresql_databases' config.get('borgmatic_source_directory'), 'postgresql_databases'
) )
@ -92,12 +92,12 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run):
) )
def dump_databases(databases, log_prefix, location_config, dry_run): def dump_databases(databases, config, log_prefix, dry_run):
''' '''
Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log dicts, one dict describing each database as per the configuration schema. Use the given
prefix in any log entries. Use the given location configuration dict to construct the configuration dict to construct the destination path and the given log prefix in any log
destination path. entries.
Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named
pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence.
@ -111,7 +111,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
for database in databases: for database in databases:
extra_environment = make_extra_environment(database) extra_environment = make_extra_environment(database)
dump_path = make_dump_path(location_config) dump_path = make_dump_path(config)
dump_database_names = database_names_to_dump( dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run database, extra_environment, log_prefix, dry_run
) )
@ -183,35 +183,33 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return processes return processes
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
''' '''
Remove all database dump files for this hook regardless of the given databases. Use the log Remove all database dump files for this hook regardless of the given databases. Use the given
prefix in any log entries. Use the given location configuration dict to construct the configuration dict to construct the destination path and the log prefix in any log entries. If
destination path. If this is a dry run, then don't actually remove anything. this is a dry run, then don't actually remove anything.
''' '''
dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run) dump.remove_database_dumps(make_dump_path(config), 'PostgreSQL', log_prefix, dry_run)
def make_database_dump_pattern( def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
databases, log_prefix, location_config, name=None
): # pragma: no cover
''' '''
Given a sequence of configuration dicts, a prefix to log with, a location configuration dict, Given a sequence of configuration dicts, a configuration dict, a prefix to log with, and a
and a database name to match, return the corresponding glob patterns to match the database dump database name to match, return the corresponding glob patterns to match the database dump in an
in an archive. archive.
''' '''
return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') return dump.make_database_dump_filename(make_dump_path(config), name, hostname='*')
def restore_database_dump( def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params database_config, config, log_prefix, dry_run, extract_process, connection_params
): ):
''' '''
Restore the given PostgreSQL database from an extract stream. The database is supplied as a Restore the given PostgreSQL database from an extract stream. The database is supplied as a
one-element sequence containing a dict describing the database, as per the configuration schema. one-element sequence containing a dict describing the database, as per the configuration schema.
Use the given log prefix in any log entries. If this is a dry run, then don't actually restore Use the given configuration dict to construct the destination path and the given log prefix in
anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce any log entries. If this is a dry run, then don't actually restore anything. Trigger the given
output to consume. active extract process (an instance of subprocess.Popen) to produce output to consume.
If the extract process is None, then restore the dump from the filesystem rather than from an If the extract process is None, then restore the dump from the filesystem rather than from an
extract stream. extract stream.
@ -236,7 +234,7 @@ def restore_database_dump(
all_databases = bool(database['name'] == 'all') all_databases = bool(database['name'] == 'all')
dump_filename = dump.make_database_dump_filename( dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), database['name'], database.get('hostname') make_dump_path(config), database['name'], database.get('hostname')
) )
psql_command = shlex.split(database.get('psql_command') or 'psql') psql_command = shlex.split(database.get('psql_command') or 'psql')
analyze_command = ( analyze_command = (


@ -7,21 +7,21 @@ from borgmatic.hooks import dump
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def make_dump_path(location_config): # pragma: no cover def make_dump_path(config): # pragma: no cover
''' '''
Make the dump path from the given location configuration and the name of this hook. Make the dump path from the given configuration dict and the name of this hook.
''' '''
return dump.make_database_dump_path( return dump.make_database_dump_path(
location_config.get('borgmatic_source_directory'), 'sqlite_databases' config.get('borgmatic_source_directory'), 'sqlite_databases'
) )
def dump_databases(databases, log_prefix, location_config, dry_run): def dump_databases(databases, config, log_prefix, dry_run):
''' '''
Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of
configuration dicts, as per the configuration schema. Use the given log prefix in any log configuration dicts, as per the configuration schema. Use the given configuration dict to
entries. Use the given location configuration dict to construct the destination path. If this construct the destination path and the given log prefix in any log entries. If this is a dry
is a dry run, then don't actually dump anything. run, then don't actually dump anything.
''' '''
dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
processes = [] processes = []
@ -38,7 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped' f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped'
) )
dump_path = make_dump_path(location_config) dump_path = make_dump_path(config)
dump_filename = dump.make_database_dump_filename(dump_path, database['name']) dump_filename = dump.make_database_dump_filename(dump_path, database['name'])
if os.path.exists(dump_filename): if os.path.exists(dump_filename):
logger.warning( logger.warning(
@ -65,28 +65,26 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
return processes return processes
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover def remove_database_dumps(databases, config, log_prefix, dry_run): # pragma: no cover
''' '''
Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema. Use the given log prefix in sequence of configuration dicts, as per the configuration schema. Use the given configuration
any log entries. Use the given location configuration dict to construct the destination path. dict to construct the destination path and the given log prefix in any log entries. If this is a
If this is a dry run, then don't actually remove anything. dry run, then don't actually remove anything.
''' '''
dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run) dump.remove_database_dumps(make_dump_path(config), 'SQLite', log_prefix, dry_run)
def make_database_dump_pattern( def make_database_dump_pattern(databases, config, log_prefix, name=None): # pragma: no cover
databases, log_prefix, location_config, name=None
): # pragma: no cover
''' '''
Make a pattern that matches the given SQLite3 databases. The databases are supplied as a Make a pattern that matches the given SQLite3 databases. The databases are supplied as a
sequence of configuration dicts, as per the configuration schema. sequence of configuration dicts, as per the configuration schema.
''' '''
return dump.make_database_dump_filename(make_dump_path(location_config), name) return dump.make_database_dump_filename(make_dump_path(config), name)
def restore_database_dump( def restore_database_dump(
database_config, log_prefix, location_config, dry_run, extract_process, connection_params database_config, config, log_prefix, dry_run, extract_process, connection_params
): ):
''' '''
Restore the given SQLite3 database from an extract stream. The database is supplied as a Restore the given SQLite3 database from an extract stream. The database is supplied as a

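All four database hooks above (MongoDB, MySQL/MariaDB, PostgreSQL, SQLite) change in the same way: they now take the whole configuration dict and read borgmatic_source_directory off its top level instead of a "location:" sub-dict. A rough stand-in for the shared helper follows; the join logic and the default path are assumptions for illustration, not the real borgmatic.hooks.dump code:

```python
import os

def make_database_dump_path(borgmatic_source_directory, database_hook_name):
    # Assumed behavior for illustration only.
    return os.path.join(borgmatic_source_directory or '~/.borgmatic', database_hook_name)

config = {'borgmatic_source_directory': '/root/.borgmatic'}  # Flat, no "location:" wrapper.
print(make_database_dump_path(config.get('borgmatic_source_directory'), 'postgresql_databases'))
# /root/.borgmatic/postgresql_databases
```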

@ -84,7 +84,6 @@ def test_prune_archives_command_does_not_duplicate_flags_or_raise():
False, False,
'repo', 'repo',
{}, {},
{},
'2.3.4', '2.3.4',
fuzz_argument(arguments, argument_name), fuzz_argument(arguments, argument_name),
argparse.Namespace(log_json=False), argparse.Namespace(log_json=False),


@ -13,43 +13,43 @@ def test_insert_newline_before_comment_does_not_raise():
config = module.yaml.comments.CommentedMap([(field_name, 33)]) config = module.yaml.comments.CommentedMap([(field_name, 33)])
config.yaml_set_comment_before_after_key(key=field_name, before='Comment') config.yaml_set_comment_before_after_key(key=field_name, before='Comment')
module._insert_newline_before_comment(config, field_name) module.insert_newline_before_comment(config, field_name)
def test_comment_out_line_skips_blank_line(): def test_comment_out_line_skips_blank_line():
line = ' \n' line = ' \n'
assert module._comment_out_line(line) == line assert module.comment_out_line(line) == line
def test_comment_out_line_skips_already_commented_out_line(): def test_comment_out_line_skips_already_commented_out_line():
line = ' # foo' line = ' # foo'
assert module._comment_out_line(line) == line assert module.comment_out_line(line) == line
def test_comment_out_line_comments_section_name(): def test_comment_out_line_comments_section_name():
line = 'figgy-pudding:' line = 'figgy-pudding:'
assert module._comment_out_line(line) == '# ' + line assert module.comment_out_line(line) == '# ' + line
def test_comment_out_line_comments_indented_option(): def test_comment_out_line_comments_indented_option():
line = ' enabled: true' line = ' enabled: true'
assert module._comment_out_line(line) == ' # enabled: true' assert module.comment_out_line(line) == ' # enabled: true'
def test_comment_out_line_comments_twice_indented_option(): def test_comment_out_line_comments_twice_indented_option():
line = ' - item' line = ' - item'
assert module._comment_out_line(line) == ' # - item' assert module.comment_out_line(line) == ' # - item'
def test_comment_out_optional_configuration_comments_optional_config_only(): def test_comment_out_optional_configuration_comments_optional_config_only():
# The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional. # The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional.
# It's stripped out of the final output. # It's stripped out of the final output.
flexmock(module)._comment_out_line = lambda line: '# ' + line flexmock(module).comment_out_line = lambda line: '# ' + line
config = ''' config = '''
# COMMENT_OUT # COMMENT_OUT
foo: foo:
@ -84,7 +84,7 @@ location:
# other: thing # other: thing
''' '''
assert module._comment_out_optional_configuration(config.strip()) == expected_config.strip() assert module.comment_out_optional_configuration(config.strip()) == expected_config.strip()
def test_render_configuration_converts_configuration_to_yaml_string(): def test_render_configuration_converts_configuration_to_yaml_string():
@ -204,10 +204,10 @@ def test_generate_sample_configuration_does_not_raise():
builtins = flexmock(sys.modules['builtins']) builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('schema.yaml').and_return('') builtins.should_receive('open').with_args('schema.yaml').and_return('')
flexmock(module.yaml).should_receive('round_trip_load') flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module).should_receive('_schema_to_sample_configuration') flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration') flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration') flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration') flexmock(module).should_receive('write_configuration')
module.generate_sample_configuration(False, None, 'dest.yaml', 'schema.yaml') module.generate_sample_configuration(False, None, 'dest.yaml', 'schema.yaml')
@ -219,10 +219,10 @@ def test_generate_sample_configuration_with_source_filename_does_not_raise():
flexmock(module.yaml).should_receive('round_trip_load') flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module.load).should_receive('load_configuration') flexmock(module.load).should_receive('load_configuration')
flexmock(module.normalize).should_receive('normalize') flexmock(module.normalize).should_receive('normalize')
flexmock(module).should_receive('_schema_to_sample_configuration') flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration') flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration') flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration') flexmock(module).should_receive('write_configuration')
module.generate_sample_configuration(False, 'source.yaml', 'dest.yaml', 'schema.yaml') module.generate_sample_configuration(False, 'source.yaml', 'dest.yaml', 'schema.yaml')
@ -232,10 +232,10 @@ def test_generate_sample_configuration_with_dry_run_does_not_write_file():
builtins = flexmock(sys.modules['builtins']) builtins = flexmock(sys.modules['builtins'])
builtins.should_receive('open').with_args('schema.yaml').and_return('') builtins.should_receive('open').with_args('schema.yaml').and_return('')
flexmock(module.yaml).should_receive('round_trip_load') flexmock(module.yaml).should_receive('round_trip_load')
flexmock(module).should_receive('_schema_to_sample_configuration') flexmock(module).should_receive('schema_to_sample_configuration')
flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('merge_source_configuration_into_destination')
flexmock(module).should_receive('render_configuration') flexmock(module).should_receive('render_configuration')
flexmock(module).should_receive('_comment_out_optional_configuration') flexmock(module).should_receive('comment_out_optional_configuration')
flexmock(module).should_receive('write_configuration').never() flexmock(module).should_receive('write_configuration').never()
module.generate_sample_configuration(True, None, 'dest.yaml', 'schema.yaml') module.generate_sample_configuration(True, None, 'dest.yaml', 'schema.yaml')


@ -40,35 +40,32 @@ def mock_config_and_schema(config_yaml, schema_yaml=None):
def test_parse_configuration_transforms_file_into_mapping(): def test_parse_configuration_transforms_file_into_mapping():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories:
source_directories: - /home
- /home - /etc
- /etc
repositories: repositories:
- path: hostname.borg - path: hostname.borg
retention: keep_minutely: 60
keep_minutely: 60 keep_hourly: 24
keep_hourly: 24 keep_daily: 7
keep_daily: 7
consistency: checks:
checks: - name: repository
- name: repository - name: archives
- name: archives
''' '''
) )
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == { assert config == {
'location': { 'source_directories': ['/home', '/etc'],
'source_directories': ['/home', '/etc'], 'repositories': [{'path': 'hostname.borg'}],
'repositories': [{'path': 'hostname.borg'}], 'keep_daily': 7,
}, 'keep_hourly': 24,
'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60}, 'keep_minutely': 60,
'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]}, 'checks': [{'name': 'repository'}, {'name': 'archives'}],
} }
assert logs == [] assert logs == []
@ -78,22 +75,19 @@ def test_parse_configuration_passes_through_quoted_punctuation():
mock_config_and_schema( mock_config_and_schema(
f''' f'''
location: source_directories:
source_directories: - "/home/{escaped_punctuation}"
- "/home/{escaped_punctuation}"
repositories: repositories:
- path: test.borg - path: test.borg
''' '''
) )
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == { assert config == {
'location': { 'source_directories': [f'/home/{string.punctuation}'],
'source_directories': [f'/home/{string.punctuation}'], 'repositories': [{'path': 'test.borg'}],
'repositories': [{'path': 'test.borg'}],
}
} }
assert logs == [] assert logs == []
@ -101,26 +95,22 @@ def test_parse_configuration_passes_through_quoted_punctuation():
def test_parse_configuration_with_schema_lacking_examples_does_not_raise(): def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories:
source_directories: - /home
- /home
repositories: repositories:
- path: hostname.borg - path: hostname.borg
''', ''',
''' '''
map: map:
location: source_directories:
required: true required: true
map: seq:
source_directories: - type: scalar
required: true repositories:
seq: required: true
- type: scalar seq:
repositories: - type: scalar
required: true
seq:
- type: scalar
''', ''',
) )
@ -130,12 +120,11 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
def test_parse_configuration_inlines_include(): def test_parse_configuration_inlines_include():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories:
source_directories: - /home
- /home
repositories: repositories:
- path: hostname.borg - path: hostname.borg
retention: retention:
!include include.yaml !include include.yaml
@ -154,25 +143,25 @@ def test_parse_configuration_inlines_include():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == { assert config == {
'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]}, 'source_directories': ['/home'],
'retention': {'keep_daily': 7, 'keep_hourly': 24}, 'repositories': [{'path': 'hostname.borg'}],
'keep_daily': 7,
'keep_hourly': 24,
} }
assert logs == [] assert len(logs) == 1
def test_parse_configuration_merges_include(): def test_parse_configuration_merges_include():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories:
source_directories: - /home
- /home
repositories: repositories:
- path: hostname.borg - path: hostname.borg
retention: keep_daily: 1
keep_daily: 1 <<: !include include.yaml
<<: !include include.yaml
''' '''
) )
builtins = flexmock(sys.modules['builtins']) builtins = flexmock(sys.modules['builtins'])
@ -188,8 +177,10 @@ def test_parse_configuration_merges_include():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == { assert config == {
'location': {'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}]}, 'source_directories': ['/home'],
'retention': {'keep_daily': 1, 'keep_hourly': 24}, 'repositories': [{'path': 'hostname.borg'}],
'keep_daily': 1,
'keep_hourly': 24,
} }
assert logs == [] assert logs == []
@ -218,10 +209,9 @@ def test_parse_configuration_raises_for_syntax_error():
def test_parse_configuration_raises_for_validation_error(): def test_parse_configuration_raises_for_validation_error():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories: yes
source_directories: yes repositories:
repositories: - path: hostname.borg
- path: hostname.borg
''' '''
) )
@ -232,14 +222,13 @@ def test_parse_configuration_raises_for_validation_error():
def test_parse_configuration_applies_overrides(): def test_parse_configuration_applies_overrides():
mock_config_and_schema( mock_config_and_schema(
''' '''
location: source_directories:
source_directories: - /home
- /home
repositories: repositories:
- path: hostname.borg - path: hostname.borg
local_path: borg1 local_path: borg1
''' '''
) )
@ -248,11 +237,9 @@ def test_parse_configuration_applies_overrides():
) )
assert config == { assert config == {
'location': { 'source_directories': ['/home'],
'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}],
'repositories': [{'path': 'hostname.borg'}], 'local_path': 'borg2',
'local_path': 'borg2',
}
} }
assert logs == [] assert logs == []
@ -274,10 +261,8 @@ def test_parse_configuration_applies_normalization():
config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml')
assert config == { assert config == {
'location': { 'source_directories': ['/home'],
'source_directories': ['/home'], 'repositories': [{'path': 'hostname.borg'}],
'repositories': [{'path': 'hostname.borg'}], 'exclude_if_present': ['.nobackup'],
'exclude_if_present': ['.nobackup'],
}
} }
assert logs assert logs
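For reference, a minimal sketch of the shape change the parse_configuration tests above assert: options that previously lived under section keys ("location:", "retention:", "consistency:") now sit at the top level of the parsed mapping. The concrete values below are illustrative placeholders, not taken from any real configuration file.

    # Former sectioned shape returned by parse_configuration:
    sectioned = {
        'location': {
            'source_directories': ['/home'],
            'repositories': [{'path': 'hostname.borg'}],
        },
        'retention': {'keep_daily': 7},
        'consistency': {'checks': [{'name': 'repository'}]},
    }

    # Flattened shape now asserted by the tests above:
    flat = {
        'source_directories': ['/home'],
        'repositories': [{'path': 'hostname.borg'}],
        'keep_daily': 7,
        'checks': [{'name': 'repository'}],
    }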
@ -14,7 +14,7 @@ def test_run_borg_does_not_raise():
module.run_borg( module.run_borg(
repository={'path': 'repos'}, repository={'path': 'repos'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
borg_arguments=borg_arguments, borg_arguments=borg_arguments,
@ -11,7 +11,7 @@ def test_run_break_lock_does_not_raise():
module.run_break_lock( module.run_break_lock(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
break_lock_arguments=break_lock_arguments, break_lock_arguments=break_lock_arguments,
global_arguments=flexmock(), global_arguments=flexmock(),
@ -5,9 +5,6 @@ from borgmatic.actions import check as module
def test_run_check_calls_hooks_for_configured_repository(): def test_run_check_calls_hooks_for_configured_repository():
flexmock(module.logger).answer = lambda message: None flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.checks).should_receive(
'repository_enabled_for_checks'
).and_return(True)
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never()
flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() flexmock(module.borgmatic.borg.check).should_receive('check_archives').once()
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
@ -23,10 +20,7 @@ def test_run_check_calls_hooks_for_configured_repository():
module.run_check( module.run_check(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={'repositories': ['repo']}, config={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
check_arguments=check_arguments, check_arguments=check_arguments,
@ -54,10 +48,7 @@ def test_run_check_runs_with_selected_repository():
module.run_check( module.run_check(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={'repositories': ['repo']}, config={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
check_arguments=check_arguments, check_arguments=check_arguments,
@ -85,10 +76,7 @@ def test_run_check_bails_if_repository_does_not_match():
module.run_check( module.run_check(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={'repositories': ['repo']}, config={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
check_arguments=check_arguments, check_arguments=check_arguments,
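The action-level functions exercised here (run_check above, and run_compact, run_create, run_prune, and friends below) now receive one flat config dict where they previously took separate location, storage, retention, consistency, and hooks dicts. A hedged sketch of that collapse, using a hypothetical flatten_sections helper that is not part of borgmatic; the option values are placeholders:

    def flatten_sections(location, storage, retention, consistency, hooks):
        # Merge the old per-section dicts into the single mapping that the
        # refactored action functions receive as their "config" argument.
        config = {}
        for section in (location, storage, retention, consistency, hooks):
            config.update(section or {})
        return config

    config = flatten_sections(
        location={'repositories': [{'path': 'repo'}]},
        storage={'lock_wait': 5},
        retention={'keep_daily': 7},
        consistency={'checks': [{'name': 'repository'}]},
        hooks={'postgresql_databases': [{'name': 'foo'}]},
    )
    # config == {'repositories': [{'path': 'repo'}], 'lock_wait': 5, 'keep_daily': 7,
    #            'checks': [{'name': 'repository'}], 'postgresql_databases': [{'name': 'foo'}]}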
@ -17,9 +17,7 @@ def test_compact_actions_calls_hooks_for_configured_repository():
module.run_compact( module.run_compact(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
compact_arguments=compact_arguments, compact_arguments=compact_arguments,
@ -45,9 +43,7 @@ def test_compact_runs_with_selected_repository():
module.run_compact( module.run_compact(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
compact_arguments=compact_arguments, compact_arguments=compact_arguments,
@ -73,9 +69,7 @@ def test_compact_bails_if_repository_does_not_match():
module.run_compact( module.run_compact(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
compact_arguments=compact_arguments, compact_arguments=compact_arguments,
@ -28,9 +28,7 @@ def test_run_create_executes_and_calls_hooks_for_configured_repository():
module.run_create( module.run_create(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={}, config={},
storage={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
create_arguments=create_arguments, create_arguments=create_arguments,
@ -49,6 +47,11 @@ def test_run_create_runs_with_selected_repository():
).once().and_return(True) ).once().and_return(True)
flexmock(module.borgmatic.borg.create).should_receive('create_archive').once() flexmock(module.borgmatic.borg.create).should_receive('create_archive').once()
flexmock(module).should_receive('create_borgmatic_manifest').once() flexmock(module).should_receive('create_borgmatic_manifest').once()
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({})
flexmock(module.borgmatic.hooks.dispatch).should_receive(
'call_hooks_even_if_unconfigured'
).and_return({})
create_arguments = flexmock( create_arguments = flexmock(
repository=flexmock(), repository=flexmock(),
progress=flexmock(), progress=flexmock(),
@ -62,9 +65,7 @@ def test_run_create_runs_with_selected_repository():
module.run_create( module.run_create(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={}, config={},
storage={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
create_arguments=create_arguments, create_arguments=create_arguments,
@ -96,9 +97,7 @@ def test_run_create_bails_if_repository_does_not_match():
module.run_create( module.run_create(
config_filename='test.yaml', config_filename='test.yaml',
repository='repo', repository='repo',
location={}, config={},
storage={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
create_arguments=create_arguments, create_arguments=create_arguments,
@ -20,7 +20,7 @@ def test_run_export_tar_does_not_raise():
module.run_export_tar( module.run_export_tar(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
export_tar_arguments=export_tar_arguments, export_tar_arguments=export_tar_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -21,9 +21,7 @@ def test_run_extract_calls_hooks():
module.run_extract( module.run_extract(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
location={'repositories': ['repo']}, config={'repositories': ['repo']},
storage={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
extract_arguments=extract_arguments, extract_arguments=extract_arguments,
@ -18,7 +18,7 @@ def test_run_info_does_not_raise():
list( list(
module.run_info( module.run_info(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
info_arguments=info_arguments, info_arguments=info_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -18,7 +18,7 @@ def test_run_list_does_not_raise():
list( list(
module.run_list( module.run_list(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -18,7 +18,7 @@ def test_run_mount_does_not_raise():
module.run_mount( module.run_mount(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -14,9 +14,7 @@ def test_run_prune_calls_hooks_for_configured_repository():
module.run_prune( module.run_prune(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -39,9 +37,7 @@ def test_run_prune_runs_with_selected_repository():
module.run_prune( module.run_prune(
config_filename='test.yaml', config_filename='test.yaml',
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -64,9 +60,7 @@ def test_run_prune_bails_if_repository_does_not_match():
module.run_prune( module.run_prune(
config_filename='test.yaml', config_filename='test.yaml',
repository='repo', repository='repo',
storage={}, config={},
retention={},
hooks={},
hook_context={}, hook_context={},
local_borg_version=None, local_borg_version=None,
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -19,7 +19,7 @@ def test_run_rcreate_does_not_raise():
module.run_rcreate( module.run_rcreate(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
rcreate_arguments=arguments, rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
@ -46,7 +46,7 @@ def test_run_rcreate_bails_if_repository_does_not_match():
module.run_rcreate( module.run_rcreate(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
rcreate_arguments=arguments, rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
@ -6,7 +6,7 @@ import borgmatic.actions.restore as module
def test_get_configured_database_matches_database_by_name(): def test_get_configured_database_matches_database_by_name():
assert module.get_configured_database( assert module.get_configured_database(
hooks={ config={
'other_databases': [{'name': 'other'}], 'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}], 'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
}, },
@ -18,7 +18,7 @@ def test_get_configured_database_matches_database_by_name():
def test_get_configured_database_matches_nothing_when_database_name_not_configured(): def test_get_configured_database_matches_nothing_when_database_name_not_configured():
assert module.get_configured_database( assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']}, archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='quux', database_name='quux',
@ -27,7 +27,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_configured():
def test_get_configured_database_matches_nothing_when_database_name_not_in_archive(): def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
assert module.get_configured_database( assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['bar']}, archive_database_names={'postgresql_databases': ['bar']},
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
@ -36,7 +36,7 @@ def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
def test_get_configured_database_matches_database_by_configuration_database_name(): def test_get_configured_database_matches_database_by_configuration_database_name():
assert module.get_configured_database( assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]}, config={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']}, archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
@ -46,7 +46,7 @@ def test_get_configured_database_matches_database_by_configuration_database_name
def test_get_configured_database_with_unspecified_hook_matches_database_by_name(): def test_get_configured_database_with_unspecified_hook_matches_database_by_name():
assert module.get_configured_database( assert module.get_configured_database(
hooks={ config={
'other_databases': [{'name': 'other'}], 'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}], 'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
}, },
@ -69,8 +69,7 @@ def test_collect_archive_database_names_parses_archive_paths():
archive_database_names = module.collect_archive_database_names( archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'}, repository={'path': 'repo'},
archive='archive', archive='archive',
location={'borgmatic_source_directory': '.borgmatic'}, config={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path=flexmock(), local_path=flexmock(),
@ -95,8 +94,7 @@ def test_collect_archive_database_names_parses_directory_format_archive_paths():
archive_database_names = module.collect_archive_database_names( archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'}, repository={'path': 'repo'},
archive='archive', archive='archive',
location={'borgmatic_source_directory': '.borgmatic'}, config={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path=flexmock(), local_path=flexmock(),
@ -117,8 +115,7 @@ def test_collect_archive_database_names_skips_bad_archive_paths():
archive_database_names = module.collect_archive_database_names( archive_database_names = module.collect_archive_database_names(
repository={'path': 'repo'}, repository={'path': 'repo'},
archive='archive', archive='archive',
location={'borgmatic_source_directory': '.borgmatic'}, config={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path=flexmock(), local_path=flexmock(),
@ -231,9 +228,7 @@ def test_run_restore_restores_each_database():
).and_return(('postgresql_databases', {'name': 'bar'})) ).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -245,9 +240,7 @@ def test_run_restore_restores_each_database():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -261,9 +254,7 @@ def test_run_restore_restores_each_database():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
@ -293,9 +284,7 @@ def test_run_restore_bails_for_non_matching_repository():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False), global_arguments=flexmock(dry_run=False),
@ -317,19 +306,19 @@ def test_run_restore_restores_database_configured_with_all_name():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'})) ).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='bar', database_name='bar',
).and_return((None, None)) ).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='bar', database_name='bar',
@ -337,9 +326,7 @@ def test_run_restore_restores_database_configured_with_all_name():
).and_return(('postgresql_databases', {'name': 'bar'})) ).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -351,9 +338,7 @@ def test_run_restore_restores_database_configured_with_all_name():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -367,9 +352,7 @@ def test_run_restore_restores_database_configured_with_all_name():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
@ -401,19 +384,19 @@ def test_run_restore_skips_missing_database():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'})) ).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='bar', database_name='bar',
).and_return((None, None)) ).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='bar', database_name='bar',
@ -421,9 +404,7 @@ def test_run_restore_skips_missing_database():
).and_return((None, None)) ).and_return((None, None))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -435,9 +416,7 @@ def test_run_restore_skips_missing_database():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -451,9 +430,7 @@ def test_run_restore_skips_missing_database():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
@ -486,22 +463,20 @@ def test_run_restore_restores_databases_from_different_hooks():
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='postgresql_databases', hook_name='postgresql_databases',
database_name='foo', database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'})) ).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args( flexmock(module).should_receive('get_configured_database').with_args(
hooks=object, config=object,
archive_database_names=object, archive_database_names=object,
hook_name='mysql_databases', hook_name='mysql_databases',
database_name='bar', database_name='bar',
).and_return(('mysql_databases', {'name': 'bar'})) ).and_return(('mysql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -513,9 +488,7 @@ def test_run_restore_restores_databases_from_different_hooks():
).once() ).once()
flexmock(module).should_receive('restore_single_database').with_args( flexmock(module).should_receive('restore_single_database').with_args(
repository=object, repository=object,
location=object, config=object,
storage=object,
hooks=object,
local_borg_version=object, local_borg_version=object,
global_arguments=object, global_arguments=object,
local_path=object, local_path=object,
@ -529,9 +502,7 @@ def test_run_restore_restores_databases_from_different_hooks():
module.run_restore( module.run_restore(
repository={'path': 'repo'}, repository={'path': 'repo'},
location=flexmock(), config=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
restore_arguments=flexmock( restore_arguments=flexmock(
repository='repo', repository='repo',
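A usage sketch of the renamed keyword in the restore tests above: get_configured_database now takes the flat config dict and, per the mocked return values in these tests, yields a (hook_name, database) pair, or (None, None) when nothing matches. The module alias and values mirror this test file; the example is illustrative only.

    hook_name, database = module.get_configured_database(
        config={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
        archive_database_names={'postgresql_databases': ['foo', 'bar']},
        hook_name='postgresql_databases',
        database_name='foo',
    )
    # Expected, given the mocks above: ('postgresql_databases', {'name': 'foo'})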
@ -12,7 +12,7 @@ def test_run_rinfo_does_not_raise():
list( list(
module.run_rinfo( module.run_rinfo(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
rinfo_arguments=rinfo_arguments, rinfo_arguments=rinfo_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -12,7 +12,7 @@ def test_run_rlist_does_not_raise():
list( list(
module.run_rlist( module.run_rlist(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
rlist_arguments=rlist_arguments, rlist_arguments=rlist_arguments,
global_arguments=flexmock(), global_arguments=flexmock(),
@ -11,7 +11,7 @@ def test_run_transfer_does_not_raise():
module.run_transfer( module.run_transfer(
repository={'path': 'repo'}, repository={'path': 'repo'},
storage={}, config={},
local_borg_version=None, local_borg_version=None,
transfer_arguments=transfer_arguments, transfer_arguments=transfer_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -22,7 +22,7 @@ def test_run_arbitrary_borg_calls_borg_with_flags():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
) )
@ -44,7 +44,7 @@ def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_flag():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
) )
@ -66,7 +66,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
) )
@ -75,7 +75,7 @@ def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_flag():
def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags(): def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module.flags).should_receive('make_flags').and_return(()).and_return( flexmock(module.flags).should_receive('make_flags').and_return(()).and_return(
('--lock-wait', '5') ('--lock-wait', '5')
) )
@ -90,7 +90,7 @@ def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_flags():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
) )
@ -111,7 +111,7 @@ def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_flag():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::$ARCHIVE'], options=['break-lock', '::$ARCHIVE'],
archive='archive', archive='archive',
@ -133,7 +133,7 @@ def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
local_path='borg1', local_path='borg1',
@ -157,7 +157,7 @@ def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_flags()
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['break-lock', '::'], options=['break-lock', '::'],
remote_path='borg1', remote_path='borg1',
@ -179,7 +179,7 @@ def test_run_arbitrary_borg_passes_borg_specific_flags_to_borg():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['list', '--progress', '::'], options=['list', '--progress', '::'],
) )
@ -200,7 +200,7 @@ def test_run_arbitrary_borg_omits_dash_dash_in_flags_passed_to_borg():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['--', 'break-lock', '::'], options=['--', 'break-lock', '::'],
) )
@ -221,7 +221,7 @@ def test_run_arbitrary_borg_without_borg_specific_flags_does_not_raise():
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=[], options=[],
) )
@ -243,7 +243,7 @@ def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_injected_flags
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['key', 'export', '::'], options=['key', 'export', '::'],
) )
@ -265,7 +265,7 @@ def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_injected_fla
module.run_arbitrary_borg( module.run_arbitrary_borg(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
options=['debug', 'dump-manifest', '::', 'path'], options=['debug', 'dump-manifest', '::', 'path'],
) )
@ -22,7 +22,7 @@ def test_break_lock_calls_borg_with_required_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -34,7 +34,7 @@ def test_break_lock_calls_borg_with_remote_path_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -47,7 +47,7 @@ def test_break_lock_calls_borg_with_umask_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={'umask': '0770'}, config={'umask': '0770'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -59,7 +59,7 @@ def test_break_lock_calls_borg_with_log_json_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -71,7 +71,7 @@ def test_break_lock_calls_borg_with_lock_wait_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={'lock_wait': '5'}, config={'lock_wait': '5'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -84,7 +84,7 @@ def test_break_lock_with_log_info_calls_borg_with_info_parameter():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -97,7 +97,7 @@ def test_break_lock_with_log_debug_calls_borg_with_debug_flags():
module.break_lock( module.break_lock(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
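These break_lock tests route what used to be storage options through the same flat config argument. A minimal illustrative mapping combining the two options asserted above:

    config = {
        'umask': '0770',   # previously storage_config['umask']
        'lock_wait': '5',  # previously storage_config['lock_wait']
    }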
@ -99,8 +99,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
flexmock(module).should_receive('probe_for_check_time').and_return(None) flexmock(module).should_receive('probe_for_check_time').and_return(None)
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={},
consistency_config={},
borg_repository_id='repo', borg_repository_id='repo',
checks=('repository', 'archives'), checks=('repository', 'archives'),
force=False, force=False,
@ -110,8 +109,7 @@ def test_filter_checks_on_frequency_without_config_uses_default_checks():
def test_filter_checks_on_frequency_retains_unconfigured_check(): def test_filter_checks_on_frequency_retains_unconfigured_check():
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={},
consistency_config={},
borg_repository_id='repo', borg_repository_id='repo',
checks=('data',), checks=('data',),
force=False, force=False,
@ -122,8 +120,7 @@ def test_filter_checks_on_frequency_retains_check_without_frequency():
flexmock(module).should_receive('parse_frequency').and_return(None) flexmock(module).should_receive('parse_frequency').and_return(None)
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={'checks': [{'name': 'archives'}]},
consistency_config={'checks': [{'name': 'archives'}]},
borg_repository_id='repo', borg_repository_id='repo',
checks=('archives',), checks=('archives',),
force=False, force=False,
@ -141,8 +138,7 @@ def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
) )
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo', borg_repository_id='repo',
checks=('archives',), checks=('archives',),
force=False, force=False,
@ -158,8 +154,7 @@ def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file()
flexmock(module).should_receive('probe_for_check_time').and_return(None) flexmock(module).should_receive('probe_for_check_time').and_return(None)
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo', borg_repository_id='repo',
checks=('archives',), checks=('archives',),
force=False, force=False,
@ -178,8 +173,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
assert ( assert (
module.filter_checks_on_frequency( module.filter_checks_on_frequency(
location_config={}, config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo', borg_repository_id='repo',
checks=('archives',), checks=('archives',),
force=False, force=False,
@ -191,8 +185,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force(): def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
assert module.filter_checks_on_frequency( assert module.filter_checks_on_frequency(
location_config={}, config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
borg_repository_id='repo', borg_repository_id='repo',
checks=('archives',), checks=('archives',),
force=True, force=True,
@ -616,7 +609,7 @@ def test_upgrade_check_times_renames_stale_temporary_check_path():
def test_check_archives_with_progress_calls_borg_with_progress_parameter(): def test_check_archives_with_progress_calls_borg_with_progress_parameter():
checks = ('repository',) checks = ('repository',)
consistency_config = {'check_last': None} config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -639,9 +632,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
progress=True, progress=True,
@ -650,7 +641,7 @@ def test_check_archives_with_progress_calls_borg_with_progress_parameter():
def test_check_archives_with_repair_calls_borg_with_repair_parameter(): def test_check_archives_with_repair_calls_borg_with_repair_parameter():
checks = ('repository',) checks = ('repository',)
consistency_config = {'check_last': None} config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -673,9 +664,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repair=True, repair=True,
@ -693,7 +682,7 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
) )
def test_check_archives_calls_borg_with_parameters(checks): def test_check_archives_calls_borg_with_parameters(checks):
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -710,9 +699,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -721,7 +708,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
def test_check_archives_with_json_error_raises(): def test_check_archives_with_json_error_raises():
checks = ('archives',) checks = ('archives',)
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"unexpected": {"id": "repo"}}' '{"unexpected": {"id": "repo"}}'
) )
@ -734,9 +721,7 @@ def test_check_archives_with_json_error_raises():
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -745,7 +730,7 @@ def test_check_archives_with_json_error_raises():
def test_check_archives_with_missing_json_keys_raises(): def test_check_archives_with_missing_json_keys_raises():
checks = ('archives',) checks = ('archives',)
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON') flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON')
flexmock(module).should_receive('upgrade_check_times') flexmock(module).should_receive('upgrade_check_times')
flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('parse_checks')
@ -756,9 +741,7 @@ def test_check_archives_with_missing_json_keys_raises():
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -767,7 +750,7 @@ def test_check_archives_with_missing_json_keys_raises():
def test_check_archives_with_extract_check_calls_extract_only(): def test_check_archives_with_extract_check_calls_extract_only():
checks = ('extract',) checks = ('extract',)
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -784,9 +767,7 @@ def test_check_archives_with_extract_check_calls_extract_only():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -794,7 +775,7 @@ def test_check_archives_with_extract_check_calls_extract_only():
def test_check_archives_with_log_info_calls_borg_with_info_parameter(): def test_check_archives_with_log_info_calls_borg_with_info_parameter():
checks = ('repository',) checks = ('repository',)
consistency_config = {'check_last': None} config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -812,9 +793,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -822,7 +801,7 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():
def test_check_archives_with_log_debug_calls_borg_with_debug_parameter(): def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
checks = ('repository',) checks = ('repository',)
consistency_config = {'check_last': None} config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -840,16 +819,14 @@ def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
def test_check_archives_without_any_checks_bails(): def test_check_archives_without_any_checks_bails():
consistency_config = {'check_last': None} config = {'check_last': None}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -862,9 +839,7 @@ def test_check_archives_without_any_checks_bails():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -873,7 +848,7 @@ def test_check_archives_without_any_checks_bails():
def test_check_archives_with_local_path_calls_borg_via_local_path(): def test_check_archives_with_local_path_calls_borg_via_local_path():
checks = ('repository',) checks = ('repository',)
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -890,9 +865,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path='borg1', local_path='borg1',
@ -902,7 +875,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(): def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
checks = ('repository',) checks = ('repository',)
check_last = flexmock() check_last = flexmock()
consistency_config = {'check_last': check_last} config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -919,9 +892,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -931,8 +902,7 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
def test_check_archives_with_log_json_calls_borg_with_log_json_parameters(): def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
checks = ('repository',) checks = ('repository',)
check_last = flexmock() check_last = flexmock()
storage_config = {} config = {'check_last': check_last}
consistency_config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -949,9 +919,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config=storage_config,
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -960,8 +928,7 @@ def test_check_archives_with_log_json_calls_borg_with_log_json_parameters():
def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
checks = ('repository',) checks = ('repository',)
check_last = flexmock() check_last = flexmock()
storage_config = {'lock_wait': 5} config = {'lock_wait': 5, 'check_last': check_last}
consistency_config = {'check_last': check_last}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -978,9 +945,7 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config=storage_config,
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -990,7 +955,7 @@ def test_check_archives_with_retention_prefix():
checks = ('repository',) checks = ('repository',)
check_last = flexmock() check_last = flexmock()
prefix = 'foo-' prefix = 'foo-'
consistency_config = {'check_last': check_last, 'prefix': prefix} config = {'check_last': check_last, 'prefix': prefix}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -1007,9 +972,7 @@ def test_check_archives_with_retention_prefix():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -1017,7 +980,7 @@ def test_check_archives_with_retention_prefix():
def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options(): def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
checks = ('repository',) checks = ('repository',)
consistency_config = {'check_last': None} config = {'check_last': None, 'extra_borg_options': {'check': '--extra --options'}}
flexmock(module.rinfo).should_receive('display_repository_info').and_return( flexmock(module.rinfo).should_receive('display_repository_info').and_return(
'{"repository": {"id": "repo"}}' '{"repository": {"id": "repo"}}'
) )
@ -1034,9 +997,7 @@ def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
module.check_archives( module.check_archives(
repository_path='repo', repository_path='repo',
location_config={}, config=config,
storage_config={'extra_borg_options': {'check': '--extra --options'}},
consistency_config=consistency_config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
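
Not part of the diff: a minimal sketch of the consolidation these check tests exercise, with placeholder values. What used to be split across storage_config and consistency_config is now one flat dict, which is what check_archives(config=...) receives.

# Old-style per-section dicts, as used on the left-hand side of these tests:
storage_config = {'lock_wait': 5}
consistency_config = {'check_last': 3, 'prefix': 'foo-'}

# New-style flat dict, as used on the right-hand side:
config = {'lock_wait': 5, 'check_last': 3, 'prefix': 'foo-'}
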

View file

@ -27,7 +27,7 @@ def test_compact_segments_calls_borg_with_parameters():
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -40,7 +40,7 @@ def test_compact_segments_with_log_info_calls_borg_with_info_parameter():
module.compact_segments( module.compact_segments(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
dry_run=False, dry_run=False,
@ -54,7 +54,7 @@ def test_compact_segments_with_log_debug_calls_borg_with_debug_parameter():
module.compact_segments( module.compact_segments(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
dry_run=False, dry_run=False,
@ -66,7 +66,7 @@ def test_compact_segments_with_dry_run_skips_borg_call():
module.compact_segments( module.compact_segments(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
dry_run=True, dry_run=True,
@ -80,7 +80,7 @@ def test_compact_segments_with_local_path_calls_borg_via_local_path():
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path='borg1', local_path='borg1',
@ -94,7 +94,7 @@ def test_compact_segments_with_remote_path_calls_borg_with_remote_path_parameter
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -108,7 +108,7 @@ def test_compact_segments_with_progress_calls_borg_with_progress_parameter():
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
progress=True, progress=True,
@ -122,7 +122,7 @@ def test_compact_segments_with_cleanup_commits_calls_borg_with_cleanup_commits_p
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
cleanup_commits=True, cleanup_commits=True,
@ -136,7 +136,7 @@ def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter():
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
threshold=20, threshold=20,
@ -144,14 +144,14 @@ def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter():
def test_compact_segments_with_umask_calls_borg_with_umask_parameters(): def test_compact_segments_with_umask_calls_borg_with_umask_parameters():
storage_config = {'umask': '077'} config = {'umask': '077'}
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(COMPACT_COMMAND + ('--umask', '077', 'repo'), logging.INFO) insert_execute_command_mock(COMPACT_COMMAND + ('--umask', '077', 'repo'), logging.INFO)
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -164,21 +164,21 @@ def test_compact_segments_with_log_json_calls_borg_with_log_json_parameters():
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
def test_compact_segments_with_lock_wait_calls_borg_with_lock_wait_parameters(): def test_compact_segments_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(COMPACT_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO) insert_execute_command_mock(COMPACT_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO)
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -191,7 +191,7 @@ def test_compact_segments_with_extra_borg_options_calls_borg_with_extra_options(
module.compact_segments( module.compact_segments(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'extra_borg_options': {'compact': '--extra --options'}}, config={'extra_borg_options': {'compact': '--extra --options'}},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
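
Again outside the diff, an illustrative sketch of the same consolidation for compact: the storage options that compact_segments reads (umask, lock_wait, extra_borg_options) now arrive via the single config keyword. The values below are the ones these tests pass in.

# Formerly storage_config; now passed as compact_segments(config=config, ...):
config = {
    'umask': '077',
    'lock_wait': 5,
    'extra_borg_options': {'compact': '--extra --options'},
}
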

View file

@ -170,79 +170,75 @@ def test_ensure_files_readable_opens_filenames(filename_lists, opened_filenames)
def test_make_pattern_flags_includes_pattern_filename_when_given(): def test_make_pattern_flags_includes_pattern_filename_when_given():
pattern_flags = module.make_pattern_flags( pattern_flags = module.make_pattern_flags(
location_config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns' config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns'
) )
assert pattern_flags == ('--patterns-from', '/tmp/patterns') assert pattern_flags == ('--patterns-from', '/tmp/patterns')
def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config(): def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config():
pattern_flags = module.make_pattern_flags( pattern_flags = module.make_pattern_flags(config={'patterns_from': ['patterns', 'other']})
location_config={'patterns_from': ['patterns', 'other']}
)
assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other') assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other')
def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config(): def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config():
pattern_flags = module.make_pattern_flags( pattern_flags = module.make_pattern_flags(
location_config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns' config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns'
) )
assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns') assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns')
def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty(): def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty():
pattern_flags = module.make_pattern_flags(location_config={'patterns_from': None}) pattern_flags = module.make_pattern_flags(config={'patterns_from': None})
assert pattern_flags == () assert pattern_flags == ()
def test_make_exclude_flags_includes_exclude_patterns_filename_when_given(): def test_make_exclude_flags_includes_exclude_patterns_filename_when_given():
exclude_flags = module.make_exclude_flags( exclude_flags = module.make_exclude_flags(
location_config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes' config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes'
) )
assert exclude_flags == ('--exclude-from', '/tmp/excludes') assert exclude_flags == ('--exclude-from', '/tmp/excludes')
def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config(): def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config():
exclude_flags = module.make_exclude_flags( exclude_flags = module.make_exclude_flags(config={'exclude_from': ['excludes', 'other']})
location_config={'exclude_from': ['excludes', 'other']}
)
assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other') assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other')
def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config(): def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config():
exclude_flags = module.make_exclude_flags( exclude_flags = module.make_exclude_flags(
location_config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes' config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes'
) )
assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes') assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes')
def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty(): def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty():
exclude_flags = module.make_exclude_flags(location_config={'exclude_from': None}) exclude_flags = module.make_exclude_flags(config={'exclude_from': None})
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_includes_exclude_caches_when_true_in_config(): def test_make_exclude_flags_includes_exclude_caches_when_true_in_config():
exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': True}) exclude_flags = module.make_exclude_flags(config={'exclude_caches': True})
assert exclude_flags == ('--exclude-caches',) assert exclude_flags == ('--exclude-caches',)
def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config(): def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config():
exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': False}) exclude_flags = module.make_exclude_flags(config={'exclude_caches': False})
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_includes_exclude_if_present_when_in_config(): def test_make_exclude_flags_includes_exclude_if_present_when_in_config():
exclude_flags = module.make_exclude_flags( exclude_flags = module.make_exclude_flags(
location_config={'exclude_if_present': ['exclude_me', 'also_me']} config={'exclude_if_present': ['exclude_me', 'also_me']}
) )
assert exclude_flags == ( assert exclude_flags == (
@ -254,31 +250,31 @@ def test_make_exclude_flags_includes_exclude_if_present_when_in_config():
def test_make_exclude_flags_includes_keep_exclude_tags_when_true_in_config(): def test_make_exclude_flags_includes_keep_exclude_tags_when_true_in_config():
exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': True}) exclude_flags = module.make_exclude_flags(config={'keep_exclude_tags': True})
assert exclude_flags == ('--keep-exclude-tags',) assert exclude_flags == ('--keep-exclude-tags',)
def test_make_exclude_flags_does_not_include_keep_exclude_tags_when_false_in_config(): def test_make_exclude_flags_does_not_include_keep_exclude_tags_when_false_in_config():
exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': False}) exclude_flags = module.make_exclude_flags(config={'keep_exclude_tags': False})
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_includes_exclude_nodump_when_true_in_config(): def test_make_exclude_flags_includes_exclude_nodump_when_true_in_config():
exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': True}) exclude_flags = module.make_exclude_flags(config={'exclude_nodump': True})
assert exclude_flags == ('--exclude-nodump',) assert exclude_flags == ('--exclude-nodump',)
def test_make_exclude_flags_does_not_include_exclude_nodump_when_false_in_config(): def test_make_exclude_flags_does_not_include_exclude_nodump_when_false_in_config():
exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': False}) exclude_flags = module.make_exclude_flags(config={'exclude_nodump': False})
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_is_empty_when_config_has_no_excludes(): def test_make_exclude_flags_is_empty_when_config_has_no_excludes():
exclude_flags = module.make_exclude_flags(location_config={}) exclude_flags = module.make_exclude_flags(config={})
assert exclude_flags == () assert exclude_flags == ()
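
For the pure helpers above, the change is just the keyword rename, so a standalone sketch is actually runnable; it assumes a borgmatic version containing this change is importable, and it reuses only inputs and expected flags already shown in these tests.

from borgmatic.borg.create import make_exclude_flags, make_pattern_flags

# Both helpers now take the whole flat config dict rather than location_config.
assert make_pattern_flags(config={'patterns_from': ['patterns', 'other']}) == (
    '--patterns-from', 'patterns', '--patterns-from', 'other',
)
assert make_exclude_flags(config={'exclude_caches': True}) == ('--exclude-caches',)
assert make_exclude_flags(config={}) == ()
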
@ -504,12 +500,11 @@ def test_create_archive_calls_borg_with_parameters():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -548,12 +543,11 @@ def test_create_archive_calls_borg_with_environment():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -594,12 +588,11 @@ def test_create_archive_with_patterns_calls_borg_with_patterns_including_convert
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'patterns': ['pattern'], 'patterns': ['pattern'],
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -644,11 +637,10 @@ def test_create_archive_with_sources_and_used_config_paths_calls_borg_with_sourc
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=['/etc/borgmatic/config.yaml']), global_arguments=flexmock(log_json=False, used_config_paths=['/etc/borgmatic/config.yaml']),
) )
@ -689,12 +681,11 @@ def test_create_archive_with_exclude_patterns_calls_borg_with_excludes():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': ['exclude'], 'exclude_patterns': ['exclude'],
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -733,12 +724,11 @@ def test_create_archive_with_log_info_calls_borg_with_info_parameter():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -774,12 +764,11 @@ def test_create_archive_with_log_info_and_json_suppresses_most_borg_output():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
json=True, json=True,
@ -819,12 +808,11 @@ def test_create_archive_with_log_debug_calls_borg_with_debug_parameter():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -860,12 +848,11 @@ def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
json=True, json=True,
@ -904,12 +891,11 @@ def test_create_archive_with_dry_run_calls_borg_with_dry_run_parameter():
module.create_archive( module.create_archive(
dry_run=True, dry_run=True,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -950,12 +936,11 @@ def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats_paramete
module.create_archive( module.create_archive(
dry_run=True, dry_run=True,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stats=True, stats=True,
@ -994,12 +979,12 @@ def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_inte
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'checkpoint_interval': 600,
}, },
storage_config={'checkpoint_interval': 600},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1037,12 +1022,12 @@ def test_create_archive_with_checkpoint_volume_calls_borg_with_checkpoint_volume
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'checkpoint_volume': 1024,
}, },
storage_config={'checkpoint_volume': 1024},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1080,12 +1065,12 @@ def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_param
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'chunker_params': '1,2,3,4',
}, },
storage_config={'chunker_params': '1,2,3,4'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1123,12 +1108,12 @@ def test_create_archive_with_compression_calls_borg_with_compression_parameters(
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'compression': 'rle',
}, },
storage_config={'compression': 'rle'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1172,12 +1157,12 @@ def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'upload_rate_limit': 100,
}, },
storage_config={'upload_rate_limit': 100},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1217,13 +1202,12 @@ def test_create_archive_with_working_directory_calls_borg_with_working_directory
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'working_directory': '/working/dir', 'working_directory': '/working/dir',
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1261,13 +1245,12 @@ def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_par
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'one_file_system': True, 'one_file_system': True,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1311,13 +1294,12 @@ def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter(
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'numeric_ids': True, 'numeric_ids': True,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1365,13 +1347,12 @@ def test_create_archive_with_read_special_calls_borg_with_read_special_parameter
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'read_special': True, 'read_special': True,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1421,13 +1402,12 @@ def test_create_archive_with_basic_option_calls_borg_with_corresponding_paramete
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
option_name: option_value, option_name: option_value,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1476,13 +1456,12 @@ def test_create_archive_with_atime_option_calls_borg_with_corresponding_paramete
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'atime': option_value, 'atime': option_value,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1531,13 +1510,12 @@ def test_create_archive_with_flags_option_calls_borg_with_corresponding_paramete
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'flags': option_value, 'flags': option_value,
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1575,13 +1553,12 @@ def test_create_archive_with_files_cache_calls_borg_with_files_cache_parameters(
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'files_cache': 'ctime,size', 'files_cache': 'ctime,size',
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1619,12 +1596,11 @@ def test_create_archive_with_local_path_calls_borg_via_local_path():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
local_path='borg1', local_path='borg1',
@ -1663,12 +1639,11 @@ def test_create_archive_with_remote_path_calls_borg_with_remote_path_parameters(
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
remote_path='borg1', remote_path='borg1',
@ -1707,12 +1682,12 @@ def test_create_archive_with_umask_calls_borg_with_umask_parameters():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'umask': 740,
}, },
storage_config={'umask': 740},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1750,12 +1725,11 @@ def test_create_archive_with_log_json_calls_borg_with_log_json_parameters():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True, used_config_paths=[]), global_arguments=flexmock(log_json=True, used_config_paths=[]),
) )
@ -1793,12 +1767,12 @@ def test_create_archive_with_lock_wait_calls_borg_with_lock_wait_parameters():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'lock_wait': 5,
}, },
storage_config={'lock_wait': 5},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -1836,12 +1810,11 @@ def test_create_archive_with_stats_calls_borg_with_stats_parameter_and_answer_ou
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stats=True, stats=True,
@ -1880,12 +1853,11 @@ def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_out
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
list_files=True, list_files=True,
@ -1930,12 +1902,11 @@ def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_para
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
progress=True, progress=True,
@ -1974,12 +1945,11 @@ def test_create_archive_with_progress_calls_borg_with_progress_parameter():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
progress=True, progress=True,
@ -2035,12 +2005,11 @@ def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progr
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
progress=True, progress=True,
@ -2099,13 +2068,12 @@ def test_create_archive_with_stream_processes_ignores_read_special_false_and_log
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'read_special': False, 'read_special': False,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stream_processes=processes, stream_processes=processes,
@ -2168,12 +2136,11 @@ def test_create_archive_with_stream_processes_adds_special_files_to_excludes():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stream_processes=processes, stream_processes=processes,
@ -2232,13 +2199,12 @@ def test_create_archive_with_stream_processes_and_read_special_does_not_add_spec
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'read_special': True, 'read_special': True,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stream_processes=processes, stream_processes=processes,
@ -2274,12 +2240,11 @@ def test_create_archive_with_json_calls_borg_with_json_parameter():
json_output = module.create_archive( json_output = module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
json=True, json=True,
@ -2317,12 +2282,11 @@ def test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter()
json_output = module.create_archive( json_output = module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
json=True, json=True,
@ -2365,12 +2329,11 @@ def test_create_archive_with_source_directories_glob_expands():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo*'], 'source_directories': ['foo*'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2409,12 +2372,11 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo*'], 'source_directories': ['foo*'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2452,12 +2414,11 @@ def test_create_archive_with_glob_calls_borg_with_expanded_directories():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo*'], 'source_directories': ['foo*'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2495,12 +2456,12 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'archive_name_format': 'ARCHIVE_NAME',
}, },
storage_config={'archive_name_format': 'ARCHIVE_NAME'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2539,12 +2500,12 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003
}, },
storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, # noqa: FS003
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2583,12 +2544,12 @@ def test_create_archive_with_repository_accepts_borg_placeholders():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='{fqdn}', # noqa: FS003 repository_path='{fqdn}', # noqa: FS003
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['{fqdn}'], # noqa: FS003 'repositories': ['{fqdn}'], # noqa: FS003
'exclude_patterns': None, 'exclude_patterns': None,
'archive_name_format': 'Documents_{hostname}-{now}', # noqa: FS003
}, },
storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, # noqa: FS003
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2626,12 +2587,12 @@ def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options():
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'extra_borg_options': {'create': '--extra --options'},
}, },
storage_config={'extra_borg_options': {'create': '--extra --options'}},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
@ -2687,12 +2648,11 @@ def test_create_archive_with_stream_processes_calls_borg_with_processes_and_read
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
stream_processes=processes, stream_processes=processes,
@ -2712,13 +2672,12 @@ def test_create_archive_with_non_existent_directory_and_source_directories_must_
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
location_config={ config={
'source_directories': ['foo', 'bar'], 'source_directories': ['foo', 'bar'],
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
'source_directories_must_exist': True, 'source_directories_must_exist': True,
}, },
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False, used_config_paths=[]), global_arguments=flexmock(log_json=False, used_config_paths=[]),
) )
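
Stepping back from the diff for a moment: the net effect on these create tests is easiest to see in the config dict itself. Options that used to live in storage_config (checkpoint_interval, compression, archive_name_format, extra_borg_options, umask, lock_wait) now sit next to the location options. The dict below is illustrative and only combines values already used by the tests above.

# One flat dict replaces the location_config/storage_config pair passed to create_archive():
config = {
    'source_directories': ['foo', 'bar'],
    'repositories': ['repo'],
    'exclude_patterns': None,
    'checkpoint_interval': 600,
    'compression': 'rle',
    'archive_name_format': 'ARCHIVE_NAME',
    'extra_borg_options': {'create': '--extra --options'},
}
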

View file

@ -36,7 +36,7 @@ def test_export_tar_archive_calls_borg_with_path_parameters():
archive='archive', archive='archive',
paths=['path1', 'path2'], paths=['path1', 'path2'],
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -58,7 +58,7 @@ def test_export_tar_archive_calls_borg_with_local_path_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path='borg1', local_path='borg1',
@ -81,7 +81,7 @@ def test_export_tar_archive_calls_borg_with_remote_path_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -104,7 +104,7 @@ def test_export_tar_archive_calls_borg_with_umask_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={'umask': '0770'}, config={'umask': '0770'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -124,7 +124,7 @@ def test_export_tar_archive_calls_borg_with_log_json_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -146,7 +146,7 @@ def test_export_tar_archive_calls_borg_with_lock_wait_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={'lock_wait': '5'}, config={'lock_wait': '5'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -167,7 +167,7 @@ def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -190,7 +190,7 @@ def test_export_tar_archive_with_log_debug_calls_borg_with_debug_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -210,7 +210,7 @@ def test_export_tar_archive_calls_borg_with_dry_run_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -232,7 +232,7 @@ def test_export_tar_archive_calls_borg_with_tar_filter_parameters():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
tar_filter='bzip2', tar_filter='bzip2',
@ -256,7 +256,7 @@ def test_export_tar_archive_calls_borg_with_list_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
list_files=True, list_files=True,
@ -279,7 +279,7 @@ def test_export_tar_archive_calls_borg_with_strip_components_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
strip_components=5, strip_components=5,
@ -300,7 +300,7 @@ def test_export_tar_archive_skips_abspath_for_remote_repository_parameter():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='test.tar', destination_path='test.tar',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -320,7 +320,7 @@ def test_export_tar_archive_calls_borg_with_stdout_destination_path():
archive='archive', archive='archive',
paths=None, paths=None,
destination_path='-', destination_path='-',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
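
Illustration only, not from the diff: export_tar_archive now reads its storage options from the same flat dict, so the umask and lock_wait values these tests use would be supplied together.

config = {'umask': '0770', 'lock_wait': '5'}  # formerly storage_config
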

View file

@ -25,7 +25,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_last_archive():
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -38,7 +38,7 @@ def test_extract_last_archive_dry_run_without_any_archives_should_not_raise():
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(('repo',))
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -55,7 +55,7 @@ def test_extract_last_archive_dry_run_with_log_info_calls_borg_with_info_paramet
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -74,7 +74,7 @@ def test_extract_last_archive_dry_run_with_log_debug_calls_borg_with_debug_param
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -90,7 +90,7 @@ def test_extract_last_archive_dry_run_calls_borg_via_local_path():
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -109,7 +109,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_remote_path_flags():
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -126,7 +126,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_log_json_flag():
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
repository_path='repo', repository_path='repo',
@ -144,7 +144,7 @@ def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_flags():
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
repository_path='repo', repository_path='repo',
@ -168,8 +168,7 @@ def test_extract_archive_calls_borg_with_path_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=['path1', 'path2'], paths=['path1', 'path2'],
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -191,8 +190,7 @@ def test_extract_archive_calls_borg_with_remote_path_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -222,8 +220,7 @@ def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={'numeric_ids': True}, config={'numeric_ids': True},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -245,8 +242,7 @@ def test_extract_archive_calls_borg_with_umask_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={'umask': '0770'},
storage_config={'umask': '0770'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -265,8 +261,7 @@ def test_extract_archive_calls_borg_with_log_json_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -288,8 +283,7 @@ def test_extract_archive_calls_borg_with_lock_wait_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={'lock_wait': '5'},
storage_config={'lock_wait': '5'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -312,8 +306,7 @@ def test_extract_archive_with_log_info_calls_borg_with_info_parameter():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -338,8 +331,7 @@ def test_extract_archive_with_log_debug_calls_borg_with_debug_flags():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -361,8 +353,7 @@ def test_extract_archive_calls_borg_with_dry_run_parameter():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -384,8 +375,7 @@ def test_extract_archive_calls_borg_with_destination_path():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
destination_path='/dest', destination_path='/dest',
@ -408,8 +398,7 @@ def test_extract_archive_calls_borg_with_strip_components():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
strip_components=5, strip_components=5,
@ -442,8 +431,7 @@ def test_extract_archive_calls_borg_with_strip_components_calculated_from_all():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=['foo/bar/baz.txt', 'foo/bar.txt'], paths=['foo/bar/baz.txt', 'foo/bar.txt'],
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
strip_components='all', strip_components='all',
@ -467,8 +455,7 @@ def test_extract_archive_with_strip_components_all_and_no_paths_raises():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
strip_components='all', strip_components='all',
@ -497,8 +484,7 @@ def test_extract_archive_calls_borg_with_progress_parameter():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
progress=True, progress=True,
@ -514,8 +500,7 @@ def test_extract_archive_with_progress_and_extract_to_stdout_raises():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
progress=True, progress=True,
@ -548,8 +533,7 @@ def test_extract_archive_calls_borg_with_stdout_parameter_and_returns_process():
repository='repo', repository='repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
extract_to_stdout=True, extract_to_stdout=True,
@ -579,8 +563,7 @@ def test_extract_archive_skips_abspath_for_remote_repository():
repository='server:repo', repository='server:repo',
archive='archive', archive='archive',
paths=None, paths=None,
location_config={}, config={},
storage_config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
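The extract tests above show the other half of the consolidation: extract_archive() previously took two dicts, location_config and storage_config, and now takes a single flat config dict, so options such as numeric_ids, umask, and lock_wait live side by side. A small runnable sketch using the exact values from these hunks (plain Python, no borgmatic import needed):

# Old test setup: one dict per configuration section.
location_config = {'numeric_ids': True}
storage_config = {'umask': '0770', 'lock_wait': '5'}

# New test setup: one flat dict, as passed via config= in the hunks above.
config = {'numeric_ids': True, 'umask': '0770', 'lock_wait': '5'}

# Every option extract_archive() reads now comes from the same mapping.
assert config['numeric_ids'] == location_config['numeric_ids']
assert config['umask'] == storage_config['umask']
assert config['lock_wait'] == storage_config['lock_wait']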


@ -27,7 +27,7 @@ def test_display_archives_info_calls_borg_with_parameters():
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -53,7 +53,7 @@ def test_display_archives_info_with_log_info_calls_borg_with_info_parameter():
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -78,7 +78,7 @@ def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_outpu
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
json_output = module.display_archives_info( json_output = module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None),
@ -107,7 +107,7 @@ def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter():
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -132,7 +132,7 @@ def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_outp
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
json_output = module.display_archives_info( json_output = module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None),
@ -158,7 +158,7 @@ def test_display_archives_info_with_json_calls_borg_with_json_parameter():
json_output = module.display_archives_info( json_output = module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=True, prefix=None, match_archives=None),
@ -186,7 +186,7 @@ def test_display_archives_info_with_archive_calls_borg_with_match_archives_param
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive='archive', json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive='archive', json=False, prefix=None, match_archives=None),
@ -212,7 +212,7 @@ def test_display_archives_info_with_local_path_calls_borg_via_local_path():
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -242,7 +242,7 @@ def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_para
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -272,7 +272,7 @@ def test_display_archives_info_with_log_json_calls_borg_with_log_json_parameters
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -291,7 +291,7 @@ def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_paramete
).and_return(()) ).and_return(())
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
('borg', 'info', '--lock-wait', '5', '--repo', 'repo'), ('borg', 'info', '--lock-wait', '5', '--repo', 'repo'),
@ -302,7 +302,7 @@ def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_paramete
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -331,7 +331,7 @@ def test_display_archives_info_transforms_prefix_into_match_archives_parameters(
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix='foo'), info_arguments=flexmock(archive=None, json=False, prefix='foo'),
@ -360,7 +360,7 @@ def test_display_archives_info_prefers_prefix_over_archive_name_format():
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix='foo'), info_arguments=flexmock(archive=None, json=False, prefix='foo'),
@ -386,7 +386,7 @@ def test_display_archives_info_transforms_archive_name_format_into_match_archive
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
@ -412,7 +412,7 @@ def test_display_archives_with_match_archives_option_calls_borg_with_match_archi
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={ config={
'archive_name_format': 'bar-{now}', # noqa: FS003 'archive_name_format': 'bar-{now}', # noqa: FS003
'match_archives': 'sh:foo-*', 'match_archives': 'sh:foo-*',
}, },
@ -441,7 +441,7 @@ def test_display_archives_with_match_archives_flag_calls_borg_with_match_archive
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives='sh:foo-*'), info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives='sh:foo-*'),
@ -471,7 +471,7 @@ def test_display_archives_info_passes_through_arguments_to_borg(argument_name):
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=flexmock( info_arguments=flexmock(
@ -523,7 +523,7 @@ def test_display_archives_info_with_date_based_matching_calls_borg_with_date_bas
) )
module.display_archives_info( module.display_archives_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
info_arguments=info_arguments, info_arguments=info_arguments,
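In the info tests above, the unified config also carries the archive-matching options (archive_name_format and match_archives) that previously sat in the storage section. A sketch of the call shape these tests exercise, using only keyword arguments visible in the hunks; in the unit tests, execute_command and the flag helpers are mocked, so this shape never invokes a real borg binary there:

from flexmock import flexmock

import borgmatic.borg.info

# Outside the mocked tests this would actually run `borg info` against 'repo'.
borgmatic.borg.info.display_archives_info(
    repository_path='repo',
    config={
        'archive_name_format': 'bar-{now}',  # noqa: FS003
        'match_archives': 'sh:foo-*',
    },
    local_borg_version='2.3.4',
    global_arguments=flexmock(log_json=False),
    info_arguments=flexmock(archive=None, json=False, prefix=None, match_archives=None),
)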


@ -17,7 +17,7 @@ def test_make_list_command_includes_log_info():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -34,7 +34,7 @@ def test_make_list_command_includes_json_but_not_info():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=True), list_arguments=flexmock(archive=None, paths=None, json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -51,7 +51,7 @@ def test_make_list_command_includes_log_debug():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -68,7 +68,7 @@ def test_make_list_command_includes_json_but_not_debug():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=True), list_arguments=flexmock(archive=None, paths=None, json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -84,7 +84,7 @@ def test_make_list_command_includes_json():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=True), list_arguments=flexmock(archive=None, paths=None, json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -100,7 +100,7 @@ def test_make_list_command_includes_log_json():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
@ -118,7 +118,7 @@ def test_make_list_command_includes_lock_wait():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={'lock_wait': 5}, config={'lock_wait': 5},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -136,7 +136,7 @@ def test_make_list_command_includes_archive():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive='archive', paths=None, json=False), list_arguments=flexmock(archive='archive', paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -154,7 +154,7 @@ def test_make_list_command_includes_archive_and_path():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive='archive', paths=['var/lib'], json=False), list_arguments=flexmock(archive='archive', paths=['var/lib'], json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -170,7 +170,7 @@ def test_make_list_command_includes_local_path():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -193,7 +193,7 @@ def test_make_list_command_includes_remote_path():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False), list_arguments=flexmock(archive=None, paths=None, json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -210,7 +210,7 @@ def test_make_list_command_includes_short():
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock(archive=None, paths=None, json=False, short=True), list_arguments=flexmock(archive=None, paths=None, json=False, short=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -242,7 +242,7 @@ def test_make_list_command_includes_additional_flags(argument_name):
command = module.make_list_command( command = module.make_list_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=flexmock( list_arguments=flexmock(
archive=None, archive=None,
@ -293,7 +293,7 @@ def test_capture_archive_listing_does_not_raise():
module.capture_archive_listing( module.capture_archive_listing(
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
storage_config=flexmock(), config=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -319,7 +319,7 @@ def test_list_archive_calls_borg_with_flags():
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -337,7 +337,7 @@ def test_list_archive_calls_borg_with_flags():
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -355,7 +355,7 @@ def test_list_archive_with_archive_and_json_errors():
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -382,7 +382,7 @@ def test_list_archive_calls_borg_with_local_path():
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -400,7 +400,7 @@ def test_list_archive_calls_borg_with_local_path():
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -450,7 +450,7 @@ def test_list_archive_calls_borg_multiple_times_with_find_paths():
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -477,7 +477,7 @@ def test_list_archive_calls_borg_with_archive():
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -495,7 +495,7 @@ def test_list_archive_calls_borg_with_archive():
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -526,7 +526,7 @@ def test_list_archive_without_archive_delegates_to_list_repository():
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -557,7 +557,7 @@ def test_list_archive_with_borg_features_without_archive_delegates_to_list_repos
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=list_arguments, list_arguments=list_arguments,
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -595,7 +595,7 @@ def test_list_archive_with_archive_ignores_archive_filter_flag(
).and_return(False) ).and_return(False)
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=argparse.Namespace( list_arguments=argparse.Namespace(
archive='archive', paths=None, json=False, find_paths=None, **default_filter_flags archive='archive', paths=None, json=False, find_paths=None, **default_filter_flags
@ -615,7 +615,7 @@ def test_list_archive_with_archive_ignores_archive_filter_flag(
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=argparse.Namespace( list_arguments=argparse.Namespace(
archive='archive', paths=None, json=False, find_paths=None, **altered_filter_flags archive='archive', paths=None, json=False, find_paths=None, **altered_filter_flags
@ -654,7 +654,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes
flexmock(module.rlist).should_receive('make_rlist_command').with_args( flexmock(module.rlist).should_receive('make_rlist_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=argparse.Namespace( rlist_arguments=argparse.Namespace(
repository='repo', short=True, format=None, json=None, **altered_filter_flags repository='repo', short=True, format=None, json=None, **altered_filter_flags
@ -671,7 +671,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=argparse.Namespace( list_arguments=argparse.Namespace(
repository='repo', repository='repo',
@ -690,7 +690,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes
flexmock(module).should_receive('make_list_command').with_args( flexmock(module).should_receive('make_list_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=argparse.Namespace( list_arguments=argparse.Namespace(
repository='repo', repository='repo',
@ -724,7 +724,7 @@ def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes
module.list_archive( module.list_archive(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
list_arguments=argparse.Namespace( list_arguments=argparse.Namespace(
repository='repo', repository='repo',


@ -26,7 +26,7 @@ def test_mount_archive_calls_borg_with_required_flags():
repository_path='repo', repository_path='repo',
archive=None, archive=None,
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -49,7 +49,7 @@ def test_mount_archive_with_borg_features_calls_borg_with_repository_and_match_a
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -67,7 +67,7 @@ def test_mount_archive_without_archive_calls_borg_with_repository_flags_only():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -87,7 +87,7 @@ def test_mount_archive_calls_borg_with_path_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -107,7 +107,7 @@ def test_mount_archive_calls_borg_with_remote_path_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -126,7 +126,7 @@ def test_mount_archive_calls_borg_with_umask_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={'umask': '0770'}, config={'umask': '0770'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -144,7 +144,7 @@ def test_mount_archive_calls_borg_with_log_json_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -162,7 +162,7 @@ def test_mount_archive_calls_borg_with_lock_wait_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={'lock_wait': '5'}, config={'lock_wait': '5'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -181,7 +181,7 @@ def test_mount_archive_with_log_info_calls_borg_with_info_parameter():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -200,7 +200,7 @@ def test_mount_archive_with_log_debug_calls_borg_with_debug_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -224,7 +224,7 @@ def test_mount_archive_calls_borg_with_foreground_parameter():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -244,7 +244,7 @@ def test_mount_archive_calls_borg_with_options_flags():
repository_path='repo', repository_path='repo',
archive='archive', archive='archive',
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -305,7 +305,7 @@ def test_mount_archive_with_date_based_matching_calls_borg_with_date_based_flags
repository_path='repo', repository_path='repo',
archive=None, archive=None,
mount_arguments=mount_arguments, mount_arguments=mount_arguments,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )


@ -1,5 +1,4 @@
import logging import logging
from collections import OrderedDict
from flexmock import flexmock from flexmock import flexmock
@ -22,23 +21,28 @@ BASE_PRUNE_FLAGS = ('--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly',
def test_make_prune_flags_returns_flags_from_config(): def test_make_prune_flags_returns_flags_from_config():
retention_config = OrderedDict((('keep_daily', 1), ('keep_weekly', 2), ('keep_monthly', 3))) config = {
'keep_daily': 1,
'keep_weekly': 2,
'keep_monthly': 3,
}
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') result = module.make_prune_flags(config, local_borg_version='1.2.3')
assert result == BASE_PRUNE_FLAGS assert result == BASE_PRUNE_FLAGS
def test_make_prune_flags_accepts_prefix_with_placeholders(): def test_make_prune_flags_accepts_prefix_with_placeholders():
retention_config = OrderedDict( config = {
(('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')) # noqa: FS003 'keep_daily': 1,
) 'prefix': 'Documents_{hostname}-{now}', # noqa: FS003
}
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') result = module.make_prune_flags(config, local_borg_version='1.2.3')
expected = ( expected = (
'--keep-daily', '--keep-daily',
@ -51,13 +55,14 @@ def test_make_prune_flags_accepts_prefix_with_placeholders():
def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives(): def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives():
retention_config = OrderedDict( config = {
(('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')) # noqa: FS003 'keep_daily': 1,
) 'prefix': 'Documents_{hostname}-{now}', # noqa: FS003
}
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
result = module.make_prune_flags({}, retention_config, local_borg_version='1.2.3') result = module.make_prune_flags(config, local_borg_version='1.2.3')
expected = ( expected = (
'--keep-daily', '--keep-daily',
@ -70,12 +75,15 @@ def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives()
def test_make_prune_flags_prefers_prefix_to_archive_name_format(): def test_make_prune_flags_prefers_prefix_to_archive_name_format():
storage_config = {'archive_name_format': 'bar-{now}'} # noqa: FS003 config = {
retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'bar-'))) 'archive_name_format': 'bar-{now}', # noqa: FS003
'keep_daily': 1,
'prefix': 'bar-',
}
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').never() flexmock(module.flags).should_receive('make_match_archives_flags').never()
result = module.make_prune_flags(storage_config, retention_config, local_borg_version='1.2.3') result = module.make_prune_flags(config, local_borg_version='1.2.3')
expected = ( expected = (
'--keep-daily', '--keep-daily',
@ -88,14 +96,17 @@ def test_make_prune_flags_prefers_prefix_to_archive_name_format():
def test_make_prune_flags_without_prefix_uses_archive_name_format_instead(): def test_make_prune_flags_without_prefix_uses_archive_name_format_instead():
storage_config = {'archive_name_format': 'bar-{now}'} # noqa: FS003 config = {
retention_config = OrderedDict((('keep_daily', 1), ('prefix', None))) 'archive_name_format': 'bar-{now}', # noqa: FS003
'keep_daily': 1,
'prefix': None,
}
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_match_archives_flags').with_args( flexmock(module.flags).should_receive('make_match_archives_flags').with_args(
None, 'bar-{now}', '1.2.3' # noqa: FS003 None, 'bar-{now}', '1.2.3' # noqa: FS003
).and_return(('--match-archives', 'sh:bar-*')) ).and_return(('--match-archives', 'sh:bar-*'))
result = module.make_prune_flags(storage_config, retention_config, local_borg_version='1.2.3') result = module.make_prune_flags(config, local_borg_version='1.2.3')
expected = ( expected = (
'--keep-daily', '--keep-daily',
@ -121,8 +132,7 @@ def test_prune_archives_calls_borg_with_flags():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -140,9 +150,8 @@ def test_prune_archives_with_log_info_calls_borg_with_info_flag():
prune_arguments = flexmock(stats=False, list_archives=False) prune_arguments = flexmock(stats=False, list_archives=False)
module.prune_archives( module.prune_archives(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
dry_run=False, dry_run=False,
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -160,9 +169,8 @@ def test_prune_archives_with_log_debug_calls_borg_with_debug_flag():
prune_arguments = flexmock(stats=False, list_archives=False) prune_arguments = flexmock(stats=False, list_archives=False)
module.prune_archives( module.prune_archives(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
dry_run=False, dry_run=False,
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -179,9 +187,8 @@ def test_prune_archives_with_dry_run_calls_borg_with_dry_run_flag():
prune_arguments = flexmock(stats=False, list_archives=False) prune_arguments = flexmock(stats=False, list_archives=False)
module.prune_archives( module.prune_archives(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
dry_run=True, dry_run=True,
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -199,8 +206,7 @@ def test_prune_archives_with_local_path_calls_borg_via_local_path():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path='borg1', local_path='borg1',
@ -219,8 +225,7 @@ def test_prune_archives_with_remote_path_calls_borg_with_remote_path_flags():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -239,8 +244,7 @@ def test_prune_archives_with_stats_calls_borg_with_stats_flag_and_answer_output_
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -258,8 +262,7 @@ def test_prune_archives_with_files_calls_borg_with_list_flag_and_answer_output_l
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -269,7 +272,7 @@ def test_prune_archives_with_files_calls_borg_with_list_flag_and_answer_output_l
def test_prune_archives_with_umask_calls_borg_with_umask_flags(): def test_prune_archives_with_umask_calls_borg_with_umask_flags():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
storage_config = {'umask': '077'} config = {'umask': '077'}
flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(PRUNE_COMMAND + ('--umask', '077', 'repo'), logging.INFO) insert_execute_command_mock(PRUNE_COMMAND + ('--umask', '077', 'repo'), logging.INFO)
@ -278,8 +281,7 @@ def test_prune_archives_with_umask_calls_borg_with_umask_flags():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -297,8 +299,7 @@ def test_prune_archives_with_log_json_calls_borg_with_log_json_flag():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -308,7 +309,7 @@ def test_prune_archives_with_log_json_calls_borg_with_log_json_flag():
def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS)
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
insert_execute_command_mock(PRUNE_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO) insert_execute_command_mock(PRUNE_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO)
@ -317,8 +318,7 @@ def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -336,8 +336,7 @@ def test_prune_archives_with_extra_borg_options_calls_borg_with_extra_options():
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'extra_borg_options': {'prune': '--extra --options'}}, config={'extra_borg_options': {'prune': '--extra --options'}},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
@ -400,8 +399,7 @@ def test_prune_archives_with_date_based_matching_calls_borg_with_date_based_flag
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
retention_config=flexmock(),
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
prune_arguments=prune_arguments, prune_arguments=prune_arguments,
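The prune tests above collapse two parameters into one: make_prune_flags() now takes a single config dict holding both the retention options (keep_daily, keep_weekly, keep_monthly, prefix) and storage-side options such as archive_name_format, and prune_archives() correspondingly drops its separate retention_config argument. A small runnable sketch of the new make_prune_flags() call, using the same values as the first test above (borgmatic is assumed to be installed; the tests additionally mock out the match-archives handling):

from borgmatic.borg.prune import make_prune_flags

# Retention options now live in the same flat dict as everything else.
config = {
    'keep_daily': 1,
    'keep_weekly': 2,
    'keep_monthly': 3,
}

flags = make_prune_flags(config, local_borg_version='1.2.3')

# Per the first test above, the retention options become:
# ('--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly', '3')
print(flags)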


@ -46,7 +46,7 @@ def test_create_repository_calls_borg_with_flags():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -67,7 +67,7 @@ def test_create_repository_with_dry_run_skips_borg_call():
module.create_repository( module.create_repository(
dry_run=True, dry_run=True,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -92,7 +92,7 @@ def test_create_repository_raises_for_borg_rcreate_error():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -112,7 +112,7 @@ def test_create_repository_skips_creation_when_repository_already_exists():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -128,7 +128,7 @@ def test_create_repository_raises_for_unknown_rinfo_command_error():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -149,7 +149,7 @@ def test_create_repository_with_source_repository_calls_borg_with_other_repo_fla
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -171,7 +171,7 @@ def test_create_repository_with_copy_crypt_key_calls_borg_with_copy_crypt_key_fl
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -193,7 +193,7 @@ def test_create_repository_with_append_only_calls_borg_with_append_only_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -215,7 +215,7 @@ def test_create_repository_with_storage_quota_calls_borg_with_storage_quota_flag
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -237,7 +237,7 @@ def test_create_repository_with_make_parent_dirs_calls_borg_with_make_parent_dir
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -260,7 +260,7 @@ def test_create_repository_with_log_info_calls_borg_with_info_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -282,7 +282,7 @@ def test_create_repository_with_log_debug_calls_borg_with_debug_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -303,7 +303,7 @@ def test_create_repository_with_log_json_calls_borg_with_log_json_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
encryption_mode='repokey', encryption_mode='repokey',
@ -324,7 +324,7 @@ def test_create_repository_with_lock_wait_calls_borg_with_lock_wait_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'lock_wait': 5}, config={'lock_wait': 5},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -345,7 +345,7 @@ def test_create_repository_with_local_path_calls_borg_via_local_path():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -367,7 +367,7 @@ def test_create_repository_with_remote_path_calls_borg_with_remote_path_flag():
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',
@ -389,7 +389,7 @@ def test_create_repository_with_extra_borg_options_calls_borg_with_extra_options
module.create_repository( module.create_repository(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'extra_borg_options': {'rcreate': '--extra --options'}}, config={'extra_borg_options': {'rcreate': '--extra --options'}},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
encryption_mode='repokey', encryption_mode='repokey',

View file

@ -27,7 +27,7 @@ def test_display_repository_info_calls_borg_with_flags():
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -49,7 +49,7 @@ def test_display_repository_info_without_borg_features_calls_borg_with_info_sub_
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -76,7 +76,7 @@ def test_display_repository_info_with_log_info_calls_borg_with_info_flag():
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -102,7 +102,7 @@ def test_display_repository_info_with_log_info_and_json_suppresses_most_borg_out
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
json_output = module.display_repository_info( json_output = module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=True), rinfo_arguments=flexmock(json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -132,7 +132,7 @@ def test_display_repository_info_with_log_debug_calls_borg_with_debug_flag():
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -158,7 +158,7 @@ def test_display_repository_info_with_log_debug_and_json_suppresses_most_borg_ou
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
json_output = module.display_repository_info( json_output = module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=True), rinfo_arguments=flexmock(json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -185,7 +185,7 @@ def test_display_repository_info_with_json_calls_borg_with_json_flag():
json_output = module.display_repository_info( json_output = module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=True), rinfo_arguments=flexmock(json=True),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -214,7 +214,7 @@ def test_display_repository_info_with_local_path_calls_borg_via_local_path():
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -242,7 +242,7 @@ def test_display_repository_info_with_remote_path_calls_borg_with_remote_path_fl
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -270,7 +270,7 @@ def test_display_repository_info_with_log_json_calls_borg_with_log_json_flags():
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
@ -280,7 +280,7 @@ def test_display_repository_info_with_log_json_calls_borg_with_log_json_flags():
def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_flags(): def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.feature).should_receive('available').and_return(True)
flexmock(module.flags).should_receive('make_repository_flags').and_return( flexmock(module.flags).should_receive('make_repository_flags').and_return(
( (
@ -298,7 +298,7 @@ def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_flags(
module.display_repository_info( module.display_repository_info(
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='2.3.4', local_borg_version='2.3.4',
rinfo_arguments=flexmock(json=False), rinfo_arguments=flexmock(json=False),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
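
Several of the hunks above (rinfo here, and transfer further down) assert that a lock_wait value from the flat config surfaces as a --lock-wait flag on the borg command line. A toy translation, purely illustrative and not borgmatic's actual flags helper:

    def lock_wait_flags(config):
        lock_wait = config.get('lock_wait')
        return ('--lock-wait', str(lock_wait)) if lock_wait is not None else ()

    assert lock_wait_flags({'lock_wait': 5}) == ('--lock-wait', '5')
    assert lock_wait_flags({}) == ()
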

View file

@ -23,7 +23,7 @@ def test_resolve_archive_name_passes_through_non_latest_archive_name():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
archive, archive,
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -43,7 +43,7 @@ def test_resolve_archive_name_calls_borg_with_flags():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -64,7 +64,7 @@ def test_resolve_archive_name_with_log_info_calls_borg_without_info_flag():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -85,7 +85,7 @@ def test_resolve_archive_name_with_log_debug_calls_borg_without_debug_flag():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -105,7 +105,7 @@ def test_resolve_archive_name_with_local_path_calls_borg_via_local_path():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
local_path='borg1', local_path='borg1',
@ -126,7 +126,7 @@ def test_resolve_archive_name_with_remote_path_calls_borg_with_remote_path_flags
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
remote_path='borg1', remote_path='borg1',
@ -146,7 +146,7 @@ def test_resolve_archive_name_without_archives_raises():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -165,7 +165,7 @@ def test_resolve_archive_name_with_log_json_calls_borg_with_log_json_flags():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=True), global_arguments=flexmock(log_json=True),
) )
@ -186,7 +186,7 @@ def test_resolve_archive_name_with_lock_wait_calls_borg_with_lock_wait_flags():
module.resolve_archive_name( module.resolve_archive_name(
'repo', 'repo',
'latest', 'latest',
storage_config={'lock_wait': 'okay'}, config={'lock_wait': 'okay'},
local_borg_version='1.2.3', local_borg_version='1.2.3',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
) )
@ -205,7 +205,7 @@ def test_make_rlist_command_includes_log_info():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -227,7 +227,7 @@ def test_make_rlist_command_includes_json_but_not_info():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=True, prefix=None, match_archives=None archive=None, paths=None, json=True, prefix=None, match_archives=None
@ -249,7 +249,7 @@ def test_make_rlist_command_includes_log_debug():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -271,7 +271,7 @@ def test_make_rlist_command_includes_json_but_not_debug():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=True, prefix=None, match_archives=None archive=None, paths=None, json=True, prefix=None, match_archives=None
@ -292,7 +292,7 @@ def test_make_rlist_command_includes_json():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=True, prefix=None, match_archives=None archive=None, paths=None, json=True, prefix=None, match_archives=None
@ -315,7 +315,7 @@ def test_make_rlist_command_includes_log_json():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -338,7 +338,7 @@ def test_make_rlist_command_includes_lock_wait():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={'lock_wait': 5}, config={'lock_wait': 5},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -359,7 +359,7 @@ def test_make_rlist_command_includes_local_path():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -383,7 +383,7 @@ def test_make_rlist_command_includes_remote_path():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
@ -407,7 +407,7 @@ def test_make_rlist_command_transforms_prefix_into_match_archives():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'), rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -426,7 +426,7 @@ def test_make_rlist_command_prefers_prefix_over_archive_name_format():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'), rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'),
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
@ -445,7 +445,7 @@ def test_make_rlist_command_transforms_archive_name_format_into_match_archives()
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None archive=None, paths=None, json=False, prefix=None, match_archives=None
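
The three rlist hunks just above cover how a legacy prefix or an archive_name_format is turned into an archive-matching pattern. A hedged, self-contained approximation of the prefix half of that translation; the sh:...* shell-pattern form is an assumption based on these test names, not copied from borgmatic's flags module:

    def prefix_to_match_archives(prefix):
        # Approximate translation: a prefix 'foo' matches any archive named foo<anything>.
        return ('--match-archives', f'sh:{prefix}*') if prefix else ()

    assert prefix_to_match_archives('foo') == ('--match-archives', 'sh:foo*')
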
@ -466,7 +466,7 @@ def test_make_rlist_command_includes_short():
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, paths=None, json=False, prefix=None, match_archives=None, short=True archive=None, paths=None, json=False, prefix=None, match_archives=None, short=True
@ -501,7 +501,7 @@ def test_make_rlist_command_includes_additional_flags(argument_name):
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, archive=None,
@ -534,7 +534,7 @@ def test_make_rlist_command_with_match_archives_calls_borg_with_match_archives_f
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, archive=None,
@ -560,7 +560,7 @@ def test_list_repository_calls_borg_with_flags():
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module).should_receive('make_rlist_command').with_args( flexmock(module).should_receive('make_rlist_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=rlist_arguments, rlist_arguments=rlist_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -577,7 +577,7 @@ def test_list_repository_calls_borg_with_flags():
module.list_repository( module.list_repository(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=rlist_arguments, rlist_arguments=rlist_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -594,7 +594,7 @@ def test_list_repository_with_json_returns_borg_output():
flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.feature).should_receive('available').and_return(False)
flexmock(module).should_receive('make_rlist_command').with_args( flexmock(module).should_receive('make_rlist_command').with_args(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=rlist_arguments, rlist_arguments=rlist_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -607,7 +607,7 @@ def test_list_repository_with_json_returns_borg_output():
assert ( assert (
module.list_repository( module.list_repository(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=rlist_arguments, rlist_arguments=rlist_arguments,
global_arguments=global_arguments, global_arguments=global_arguments,
@ -628,7 +628,7 @@ def test_make_rlist_command_with_date_based_matching_calls_borg_with_date_based_
command = module.make_rlist_command( command = module.make_rlist_command(
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='1.2.3', local_borg_version='1.2.3',
rlist_arguments=flexmock( rlist_arguments=flexmock(
archive=None, archive=None,

View file

@ -27,7 +27,7 @@ def test_transfer_archives_calls_borg_with_flags():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -58,7 +58,7 @@ def test_transfer_archives_with_dry_run_calls_borg_with_dry_run_flag():
module.transfer_archives( module.transfer_archives(
dry_run=True, dry_run=True,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -86,7 +86,7 @@ def test_transfer_archives_with_log_info_calls_borg_with_info_flag():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -115,7 +115,7 @@ def test_transfer_archives_with_log_debug_calls_borg_with_debug_flag():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -145,7 +145,7 @@ def test_transfer_archives_with_archive_calls_borg_with_match_archives_flag():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive='archive', progress=None, match_archives=None, source_repository=None archive='archive', progress=None, match_archives=None, source_repository=None
@ -175,7 +175,7 @@ def test_transfer_archives_with_match_archives_calls_borg_with_match_archives_fl
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives='sh:foo*', source_repository=None archive=None, progress=None, match_archives='sh:foo*', source_repository=None
@ -205,7 +205,7 @@ def test_transfer_archives_with_archive_name_format_calls_borg_with_match_archiv
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={'archive_name_format': 'bar-{now}'}, # noqa: FS003 config={'archive_name_format': 'bar-{now}'}, # noqa: FS003
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -233,7 +233,7 @@ def test_transfer_archives_with_local_path_calls_borg_via_local_path():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -265,7 +265,7 @@ def test_transfer_archives_with_remote_path_calls_borg_with_remote_path_flags():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -297,7 +297,7 @@ def test_transfer_archives_with_log_json_calls_borg_with_log_json_flags():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -316,7 +316,7 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(()) flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(())
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo'))
storage_config = {'lock_wait': 5} config = {'lock_wait': 5}
flexmock(module.environment).should_receive('make_environment') flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command').with_args( flexmock(module).should_receive('execute_command').with_args(
('borg', 'transfer', '--lock-wait', '5', '--repo', 'repo'), ('borg', 'transfer', '--lock-wait', '5', '--repo', 'repo'),
@ -329,7 +329,7 @@ def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config=storage_config, config=config,
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository=None archive=None, progress=None, match_archives=None, source_repository=None
@ -357,7 +357,7 @@ def test_transfer_archives_with_progress_calls_borg_with_progress_flag():
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=True, match_archives=None, source_repository=None archive=None, progress=True, match_archives=None, source_repository=None
@ -389,7 +389,7 @@ def test_transfer_archives_passes_through_arguments_to_borg(argument_name):
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, archive=None,
@ -423,7 +423,7 @@ def test_transfer_archives_with_source_repository_calls_borg_with_other_repo_fla
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
transfer_arguments=flexmock( transfer_arguments=flexmock(
archive=None, progress=None, match_archives=None, source_repository='other' archive=None, progress=None, match_archives=None, source_repository='other'
@ -465,7 +465,7 @@ def test_transfer_archives_with_date_based_matching_calls_borg_with_date_based_f
module.transfer_archives( module.transfer_archives(
dry_run=False, dry_run=False,
repository_path='repo', repository_path='repo',
storage_config={}, config={},
local_borg_version='2.3.4', local_borg_version='2.3.4',
global_arguments=flexmock(log_json=False), global_arguments=flexmock(log_json=False),
transfer_arguments=flexmock( transfer_arguments=flexmock(
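
The transfer hunks above pass config={'archive_name_format': 'bar-{now}'} and expect it to drive archive matching. A sketch of one plausible way a format with placeholders collapses into a match pattern; this is an assumption for illustration, not the project's implementation:

    import re

    def format_to_match_pattern(archive_name_format):
        # Placeholders such as {now} are treated as wildcards.
        return 'sh:' + re.sub(r'\{[^}]*\}', '*', archive_name_format)

    assert format_to_match_pattern('bar-{now}') == 'sh:bar-*'
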

View file

@ -15,7 +15,7 @@ def test_run_configuration_runs_actions_for_each_repository():
flexmock(module).should_receive('run_actions').and_return(expected_results[:1]).and_return( flexmock(module).should_receive('run_actions').and_return(expected_results[:1]).and_return(
expected_results[1:] expected_results[1:]
) )
config = {'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}} config = {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}
arguments = {'global': flexmock(monitoring_verbosity=1)} arguments = {'global': flexmock(monitoring_verbosity=1)}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -29,7 +29,7 @@ def test_run_configuration_with_invalid_borg_version_errors():
flexmock(module.command).should_receive('execute_hook').never() flexmock(module.command).should_receive('execute_hook').never()
flexmock(module.dispatch).should_receive('call_hooks').never() flexmock(module.dispatch).should_receive('call_hooks').never()
flexmock(module).should_receive('run_actions').never() flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}} config = {'repositories': ['foo']}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'prune': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'prune': flexmock()}
list(module.run_configuration('test.yaml', config, arguments)) list(module.run_configuration('test.yaml', config, arguments))
@ -44,7 +44,7 @@ def test_run_configuration_logs_monitor_start_error():
expected_results = [flexmock()] expected_results = [flexmock()]
flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').never() flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}} config = {'repositories': ['foo']}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -59,7 +59,7 @@ def test_run_configuration_bails_for_monitor_start_soft_failure():
flexmock(module.dispatch).should_receive('call_hooks').and_raise(error) flexmock(module.dispatch).should_receive('call_hooks').and_raise(error)
flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('log_error_records').never()
flexmock(module).should_receive('run_actions').never() flexmock(module).should_receive('run_actions').never()
config = {'location': {'repositories': ['foo']}} config = {'repositories': ['foo']}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -75,7 +75,7 @@ def test_run_configuration_logs_actions_error():
expected_results = [flexmock()] expected_results = [flexmock()]
flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_raise(OSError) flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False)} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False)}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -91,7 +91,7 @@ def test_run_configuration_bails_for_actions_soft_failure():
flexmock(module).should_receive('run_actions').and_raise(error) flexmock(module).should_receive('run_actions').and_raise(error)
flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('log_error_records').never()
flexmock(module.command).should_receive('considered_soft_failure').and_return(True) flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -108,7 +108,7 @@ def test_run_configuration_logs_monitor_log_error():
expected_results = [flexmock()] expected_results = [flexmock()]
flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([]) flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -126,7 +126,7 @@ def test_run_configuration_bails_for_monitor_log_soft_failure():
flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('log_error_records').never()
flexmock(module).should_receive('run_actions').and_return([]) flexmock(module).should_receive('run_actions').and_return([])
flexmock(module.command).should_receive('considered_soft_failure').and_return(True) flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -143,7 +143,7 @@ def test_run_configuration_logs_monitor_finish_error():
expected_results = [flexmock()] expected_results = [flexmock()]
flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([]) flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -161,7 +161,7 @@ def test_run_configuration_bails_for_monitor_finish_soft_failure():
flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('log_error_records').never()
flexmock(module).should_receive('run_actions').and_return([]) flexmock(module).should_receive('run_actions').and_return([])
flexmock(module.command).should_receive('considered_soft_failure').and_return(True) flexmock(module.command).should_receive('considered_soft_failure').and_return(True)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -176,7 +176,7 @@ def test_run_configuration_does_not_call_monitoring_hooks_if_monitoring_hooks_ar
flexmock(module.dispatch).should_receive('call_hooks').never() flexmock(module.dispatch).should_receive('call_hooks').never()
flexmock(module).should_receive('run_actions').and_return([]) flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=-2, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=-2, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
assert results == [] assert results == []
@ -191,7 +191,7 @@ def test_run_configuration_logs_on_error_hook_error():
expected_results[:1] expected_results[:1]
).and_return(expected_results[1:]) ).and_return(expected_results[1:])
flexmock(module).should_receive('run_actions').and_raise(OSError) flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -207,7 +207,7 @@ def test_run_configuration_bails_for_on_error_hook_soft_failure():
expected_results = [flexmock()] expected_results = [flexmock()]
flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('log_error_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_raise(OSError) flexmock(module).should_receive('run_actions').and_raise(OSError)
config = {'location': {'repositories': [{'path': 'foo'}]}} config = {'repositories': [{'path': 'foo'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -222,7 +222,7 @@ def test_run_configuration_retries_soft_error():
flexmock(module.command).should_receive('execute_hook') flexmock(module.command).should_receive('execute_hook')
flexmock(module).should_receive('run_actions').and_raise(OSError).and_return([]) flexmock(module).should_receive('run_actions').and_raise(OSError).and_return([])
flexmock(module).should_receive('log_error_records').and_return([flexmock()]).once() flexmock(module).should_receive('log_error_records').and_return([flexmock()]).once()
config = {'location': {'repositories': [{'path': 'foo'}]}, 'storage': {'retries': 1}} config = {'repositories': [{'path': 'foo'}], 'retries': 1}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
assert results == [] assert results == []
@ -245,7 +245,7 @@ def test_run_configuration_retries_hard_error():
'foo: Error running actions for repository', 'foo: Error running actions for repository',
OSError, OSError,
).and_return(error_logs) ).and_return(error_logs)
config = {'location': {'repositories': [{'path': 'foo'}]}, 'storage': {'retries': 1}} config = {'repositories': [{'path': 'foo'}], 'retries': 1}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
assert results == error_logs assert results == error_logs
@ -263,7 +263,7 @@ def test_run_configuration_repos_ordered():
flexmock(module).should_receive('log_error_records').with_args( flexmock(module).should_receive('log_error_records').with_args(
'bar: Error running actions for repository', OSError 'bar: Error running actions for repository', OSError
).and_return(expected_results[1:]).ordered() ).and_return(expected_results[1:]).ordered()
config = {'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}} config = {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
assert results == expected_results assert results == expected_results
@ -295,8 +295,8 @@ def test_run_configuration_retries_round_robin():
'bar: Error running actions for repository', OSError 'bar: Error running actions for repository', OSError
).and_return(bar_error_logs).ordered() ).and_return(bar_error_logs).ordered()
config = { config = {
'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, 'repositories': [{'path': 'foo'}, {'path': 'bar'}],
'storage': {'retries': 1}, 'retries': 1,
} }
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -327,8 +327,8 @@ def test_run_configuration_retries_one_passes():
'bar: Error running actions for repository', OSError 'bar: Error running actions for repository', OSError
).and_return(error_logs).ordered() ).and_return(error_logs).ordered()
config = { config = {
'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, 'repositories': [{'path': 'foo'}, {'path': 'bar'}],
'storage': {'retries': 1}, 'retries': 1,
} }
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -369,8 +369,9 @@ def test_run_configuration_retry_wait():
'foo: Error running actions for repository', OSError 'foo: Error running actions for repository', OSError
).and_return(error_logs).ordered() ).and_return(error_logs).ordered()
config = { config = {
'location': {'repositories': [{'path': 'foo'}]}, 'repositories': [{'path': 'foo'}],
'storage': {'retries': 3, 'retry_wait': 10}, 'retries': 3,
'retry_wait': 10,
} }
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
@ -407,8 +408,9 @@ def test_run_configuration_retries_timeout_multiple_repos():
'bar: Error running actions for repository', OSError 'bar: Error running actions for repository', OSError
).and_return(error_logs).ordered() ).and_return(error_logs).ordered()
config = { config = {
'location': {'repositories': [{'path': 'foo'}, {'path': 'bar'}]}, 'repositories': [{'path': 'foo'}, {'path': 'bar'}],
'storage': {'retries': 1, 'retry_wait': 10}, 'retries': 1,
'retry_wait': 10,
} }
arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()}
results = list(module.run_configuration('test.yaml', config, arguments)) results = list(module.run_configuration('test.yaml', config, arguments))
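
These run_configuration hunks show the config fixture itself losing its section nesting. A self-contained before/after using the same keys the tests use; the dict comprehension is just one way to express that merging the old sections yields the new flat shape:

    old_style = {
        'location': {'repositories': [{'path': 'foo'}]},
        'storage': {'retries': 1, 'retry_wait': 10},
    }
    new_style = {
        'repositories': [{'path': 'foo'}],
        'retries': 1,
        'retry_wait': 10,
    }

    flattened = {key: value for section in old_style.values() for key, value in section.items()}
    assert flattened == new_style
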
@ -424,11 +426,7 @@ def test_run_actions_runs_rcreate():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rcreate': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rcreate': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
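
The run_actions hunks from here on all make the same edit: the per-section keyword arguments (location, storage, retention, consistency, hooks) are replaced by a single config. A stub with the consolidated keyword interface, shown only to make the new call shape concrete; it is not borgmatic's function, and it covers only the keywords visible in these hunks:

    def run_actions_stub(
        *,
        arguments,
        config_filename,
        config,
        local_path,
        remote_path,
        local_borg_version,
    ):
        # A real implementation would dispatch to each requested action; this stub
        # just echoes the repositories from the single flat config dict.
        return config.get('repositories', [])

    assert run_actions_stub(
        arguments={'global': None},
        config_filename='test.yaml',
        config={'repositories': []},
        local_path='borg',
        remote_path=None,
        local_borg_version='1.2.3',
    ) == []
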
@ -444,9 +442,7 @@ def test_run_actions_adds_log_file_to_hook_context():
flexmock(borgmatic.actions.create).should_receive('run_create').with_args( flexmock(borgmatic.actions.create).should_receive('run_create').with_args(
config_filename=object, config_filename=object,
repository={'path': 'repo'}, repository={'path': 'repo'},
location={'repositories': []}, config={'repositories': []},
storage=object,
hooks={},
hook_context={'repository': 'repo', 'repositories': '', 'log_file': 'foo'}, hook_context={'repository': 'repo', 'repositories': '', 'log_file': 'foo'},
local_borg_version=object, local_borg_version=object,
create_arguments=object, create_arguments=object,
@ -460,11 +456,7 @@ def test_run_actions_adds_log_file_to_hook_context():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -483,11 +475,7 @@ def test_run_actions_runs_transfer():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'transfer': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'transfer': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -506,11 +494,7 @@ def test_run_actions_runs_create():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'create': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -529,11 +513,7 @@ def test_run_actions_runs_prune():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'prune': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'prune': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -551,11 +531,7 @@ def test_run_actions_runs_compact():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'compact': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'compact': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -574,11 +550,7 @@ def test_run_actions_runs_check_when_repository_enabled_for_checks():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -597,11 +569,7 @@ def test_run_actions_skips_check_when_repository_not_enabled_for_checks():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'check': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -619,11 +587,7 @@ def test_run_actions_runs_extract():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'extract': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'extract': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -641,11 +605,7 @@ def test_run_actions_runs_export_tar():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'export-tar': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'export-tar': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -663,11 +623,7 @@ def test_run_actions_runs_mount():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'mount': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'mount': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -685,11 +641,7 @@ def test_run_actions_runs_restore():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'restore': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'restore': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -708,11 +660,7 @@ def test_run_actions_runs_rlist():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rlist': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rlist': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -732,11 +680,7 @@ def test_run_actions_runs_list():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'list': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'list': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -756,11 +700,7 @@ def test_run_actions_runs_rinfo():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rinfo': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'rinfo': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -780,11 +720,7 @@ def test_run_actions_runs_info():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'info': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'info': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -803,11 +739,7 @@ def test_run_actions_runs_break_lock():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'break-lock': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'break-lock': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -825,11 +757,7 @@ def test_run_actions_runs_borg():
module.run_actions( module.run_actions(
arguments={'global': flexmock(dry_run=False, log_file='foo'), 'borg': flexmock()}, arguments={'global': flexmock(dry_run=False, log_file='foo'), 'borg': flexmock()},
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
@ -852,11 +780,7 @@ def test_run_actions_runs_multiple_actions_in_argument_order():
'restore': flexmock(), 'restore': flexmock(),
}, },
config_filename=flexmock(), config_filename=flexmock(),
location={'repositories': []}, config={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(), local_path=flexmock(),
remote_path=flexmock(), remote_path=flexmock(),
local_borg_version=flexmock(), local_borg_version=flexmock(),
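
A quick illustration of the shape change driven through all of the run_actions hunks above: the five per-section dicts collapse into a single flat configuration dict. This is illustrative only (the option values are made up), not borgmatic's code:

    # Old-style per-section dicts...
    location = {'repositories': [{'path': '/mnt/backup'}], 'source_directories': ['/home']}
    storage = {'compression': 'lz4'}
    retention = {'keep_daily': 7}
    consistency = {'checks': [{'name': 'repository'}]}
    hooks = {'healthchecks': {'ping_url': 'https://example.com/ping'}}

    # ...flatten into the single "config" dict that run_actions() now receives.
    config = {**location, **storage, **retention, **consistency, **hooks}
    assert config['keep_daily'] == 7 and config['compression'] == 'lz4'
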
@ -951,15 +875,11 @@ def test_log_error_records_generates_nothing_for_other_error():
def test_get_local_path_uses_configuration_value(): def test_get_local_path_uses_configuration_value():
assert module.get_local_path({'test.yaml': {'location': {'local_path': 'borg1'}}}) == 'borg1' assert module.get_local_path({'test.yaml': {'local_path': 'borg1'}}) == 'borg1'
def test_get_local_path_without_location_defaults_to_borg():
assert module.get_local_path({'test.yaml': {}}) == 'borg'
def test_get_local_path_without_local_path_defaults_to_borg(): def test_get_local_path_without_local_path_defaults_to_borg():
assert module.get_local_path({'test.yaml': {'location': {}}}) == 'borg' assert module.get_local_path({'test.yaml': {}}) == 'borg'
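
A minimal sketch of the behavior these get_local_path tests describe, assuming any configured local_path wins and 'borg' is the fallback (not borgmatic's actual implementation):

    def get_local_path(configs):
        # Return the first local_path found in any configuration, else 'borg'.
        return next(
            (config.get('local_path') for config in configs.values() if config.get('local_path')),
            'borg',
        )

    assert get_local_path({'test.yaml': {'local_path': 'borg1'}}) == 'borg1'
    assert get_local_path({'test.yaml': {}}) == 'borg'
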
def test_collect_highlander_action_summary_logs_info_for_success_with_bootstrap(): def test_collect_highlander_action_summary_logs_info_for_success_with_bootstrap():

View file

@ -13,35 +13,20 @@ def test_schema_to_sample_configuration_generates_config_map_with_examples():
'type': 'object', 'type': 'object',
'properties': OrderedDict( 'properties': OrderedDict(
[ [
( ('field1', {'example': 'Example 1'}),
'section1', ('field2', {'example': 'Example 2'}),
{ ('field3', {'example': 'Example 3'}),
'type': 'object',
'properties': {'field1': OrderedDict([('example', 'Example 1')])},
},
),
(
'section2',
{
'type': 'object',
'properties': OrderedDict(
[
('field2', {'example': 'Example 2'}),
('field3', {'example': 'Example 3'}),
]
),
},
),
] ]
), ),
} }
config = module._schema_to_sample_configuration(schema) config = module.schema_to_sample_configuration(schema)
assert config == OrderedDict( assert config == OrderedDict(
[ [
('section1', OrderedDict([('field1', 'Example 1')])), ('field1', 'Example 1'),
('section2', OrderedDict([('field2', 'Example 2'), ('field3', 'Example 3')])), ('field2', 'Example 2'),
('field3', 'Example 3'),
] ]
) )
@ -51,7 +36,7 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_strings_wit
flexmock(module).should_receive('add_comments_to_configuration_sequence') flexmock(module).should_receive('add_comments_to_configuration_sequence')
schema = {'type': 'array', 'items': {'type': 'string'}, 'example': ['hi']} schema = {'type': 'array', 'items': {'type': 'string'}, 'example': ['hi']}
config = module._schema_to_sample_configuration(schema) config = module.schema_to_sample_configuration(schema)
assert config == ['hi'] assert config == ['hi']
@ -70,7 +55,7 @@ def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_e
}, },
} }
config = module._schema_to_sample_configuration(schema) config = module.schema_to_sample_configuration(schema)
assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])] assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])]
@ -79,7 +64,7 @@ def test_schema_to_sample_configuration_with_unsupported_schema_raises():
schema = {'gobbledygook': [{'type': 'not-your'}]} schema = {'gobbledygook': [{'type': 'not-your'}]}
with pytest.raises(ValueError): with pytest.raises(ValueError):
module._schema_to_sample_configuration(schema) module.schema_to_sample_configuration(schema)
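
With sections gone, generating a sample configuration becomes a straight mapping from each schema property to its example value. A simplified sketch of what the renamed schema_to_sample_configuration might do for the map case exercised above (arrays, comments, and nesting are ignored here; this is an assumption, not the real generator):

    from collections import OrderedDict

    def schema_to_sample_configuration(schema):
        # Map every top-level property directly to its example value.
        return OrderedDict(
            (name, sub_schema['example']) for name, sub_schema in schema['properties'].items()
        )

    schema = {'type': 'object', 'properties': OrderedDict([('field1', {'example': 'Example 1'})])}
    assert schema_to_sample_configuration(schema) == OrderedDict([('field1', 'Example 1')])
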
def test_merge_source_configuration_into_destination_inserts_map_fields(): def test_merge_source_configuration_into_destination_inserts_map_fields():

View file

@ -1,4 +1,5 @@
import pytest import pytest
from flexmock import flexmock
from borgmatic.config import normalize as module from borgmatic.config import normalize as module
@ -7,138 +8,220 @@ from borgmatic.config import normalize as module
'config,expected_config,produces_logs', 'config,expected_config,produces_logs',
( (
( (
{'location': {'exclude_if_present': '.nobackup'}}, {'location': {'foo': 'bar', 'baz': 'quux'}},
{'location': {'exclude_if_present': ['.nobackup']}}, {'foo': 'bar', 'baz': 'quux'},
True, True,
), ),
( (
{'location': {'exclude_if_present': ['.nobackup']}}, {'retention': {'foo': 'bar', 'baz': 'quux'}},
{'location': {'exclude_if_present': ['.nobackup']}}, {'foo': 'bar', 'baz': 'quux'},
True,
),
(
{'consistency': {'foo': 'bar', 'baz': 'quux'}},
{'foo': 'bar', 'baz': 'quux'},
True,
),
(
{'output': {'foo': 'bar', 'baz': 'quux'}},
{'foo': 'bar', 'baz': 'quux'},
True,
),
(
{'hooks': {'foo': 'bar', 'baz': 'quux'}},
{'foo': 'bar', 'baz': 'quux'},
True,
),
(
{'location': {'foo': 'bar'}, 'storage': {'baz': 'quux'}},
{'foo': 'bar', 'baz': 'quux'},
True,
),
(
{'foo': 'bar', 'baz': 'quux'},
{'foo': 'bar', 'baz': 'quux'},
False, False,
), ),
( (
{'location': {'source_directories': ['foo', 'bar']}}, {'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'foo'}},
{'location': {'source_directories': ['foo', 'bar']}}, {'prefix': 'foo'},
True,
),
(
{'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'foo'}},
{'prefix': 'foo'},
True,
),
(
{'location': {'prefix': 'foo'}, 'consistency': {'bar': 'baz'}},
{'prefix': 'foo', 'bar': 'baz'},
True,
),
(
{'storage': {'umask': 'foo'}, 'hooks': {'umask': 'foo'}},
{'umask': 'foo'},
True,
),
(
{'storage': {'umask': 'foo'}, 'hooks': {'umask': 'foo'}},
{'umask': 'foo'},
True,
),
(
{'storage': {'umask': 'foo'}, 'hooks': {'bar': 'baz'}},
{'umask': 'foo', 'bar': 'baz'},
True,
),
(
{'location': {'bar': 'baz'}, 'consistency': {'prefix': 'foo'}},
{'bar': 'baz', 'prefix': 'foo'},
True,
),
(
{},
{},
False,
),
),
)
def test_normalize_sections_moves_section_options_to_global_scope(
config, expected_config, produces_logs
):
logs = module.normalize_sections('test.yaml', config)
assert config == expected_config
if produces_logs:
assert logs
else:
assert logs == []
def test_normalize_sections_with_different_prefix_values_raises():
config = {'location': {'prefix': 'foo'}, 'consistency': {'prefix': 'bar'}}
with pytest.raises(ValueError):
module.normalize_sections('test.yaml', config)
def test_normalize_sections_with_different_umask_values_raises():
config = {'storage': {'umask': 'foo'}, 'hooks': {'umask': 'bar'}}
with pytest.raises(ValueError):
module.normalize_sections('test.yaml', config)
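
A rough sketch of the section flattening these tests pin down: hoist every option out of the legacy sections into the top level, error on conflicting duplicate values, and report a deprecation log per section encountered. The section names come from the tests; the real borgmatic function emits logging records rather than the plain strings used here:

    def normalize_sections(config_filename, config):
        logs = []
        for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'):
            section = config.pop(section_name, None) or {}
            for option_name, value in section.items():
                if option_name in config and config[option_name] != value:
                    raise ValueError(
                        f'{config_filename}: The {option_name} option appears with conflicting values'
                    )
                config[option_name] = value
            if section:
                logs.append(f'{config_filename}: The {section_name}: section is deprecated')
        return logs
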
@pytest.mark.parametrize(
'config,expected_config,produces_logs',
(
(
{'exclude_if_present': '.nobackup'},
{'exclude_if_present': ['.nobackup']},
True,
),
(
{'exclude_if_present': ['.nobackup']},
{'exclude_if_present': ['.nobackup']},
False, False,
), ),
( (
{'location': None}, {'source_directories': ['foo', 'bar']},
{'location': None}, {'source_directories': ['foo', 'bar']},
False, False,
), ),
( (
{'storage': {'compression': 'yes_please'}}, {'compression': 'yes_please'},
{'storage': {'compression': 'yes_please'}}, {'compression': 'yes_please'},
False, False,
), ),
( (
{'storage': None}, {'healthchecks': 'https://example.com'},
{'storage': None}, {'healthchecks': {'ping_url': 'https://example.com'}},
True,
),
(
{'cronitor': 'https://example.com'},
{'cronitor': {'ping_url': 'https://example.com'}},
True,
),
(
{'pagerduty': 'https://example.com'},
{'pagerduty': {'integration_key': 'https://example.com'}},
True,
),
(
{'cronhub': 'https://example.com'},
{'cronhub': {'ping_url': 'https://example.com'}},
True,
),
(
{'checks': ['archives']},
{'checks': [{'name': 'archives'}]},
True,
),
(
{'checks': ['archives']},
{'checks': [{'name': 'archives'}]},
True,
),
(
{'numeric_owner': False},
{'numeric_ids': False},
True,
),
(
{'bsd_flags': False},
{'flags': False},
True,
),
(
{'remote_rate_limit': False},
{'upload_rate_limit': False},
True,
),
(
{'repositories': ['foo@bar:/repo']},
{'repositories': [{'path': 'ssh://foo@bar/repo'}]},
True,
),
(
{'repositories': ['foo@bar:repo']},
{'repositories': [{'path': 'ssh://foo@bar/./repo'}]},
True,
),
(
{'repositories': ['foo@bar:~/repo']},
{'repositories': [{'path': 'ssh://foo@bar/~/repo'}]},
True,
),
(
{'repositories': ['ssh://foo@bar:1234/repo']},
{'repositories': [{'path': 'ssh://foo@bar:1234/repo'}]},
True,
),
(
{'repositories': ['file:///repo']},
{'repositories': [{'path': '/repo'}]},
True,
),
(
{'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}]},
{'repositories': [{'path': 'ssh://foo@bar/repo', 'label': 'foo'}]},
True,
),
(
{'repositories': [{'path': 'file:///repo', 'label': 'foo'}]},
{'repositories': [{'path': '/repo', 'label': 'foo'}]},
False, False,
), ),
( (
{'hooks': {'healthchecks': 'https://example.com'}}, {'repositories': [{'path': '/repo', 'label': 'foo'}]},
{'hooks': {'healthchecks': {'ping_url': 'https://example.com'}}}, {'repositories': [{'path': '/repo', 'label': 'foo'}]},
True,
),
(
{'hooks': {'cronitor': 'https://example.com'}},
{'hooks': {'cronitor': {'ping_url': 'https://example.com'}}},
True,
),
(
{'hooks': {'pagerduty': 'https://example.com'}},
{'hooks': {'pagerduty': {'integration_key': 'https://example.com'}}},
True,
),
(
{'hooks': {'cronhub': 'https://example.com'}},
{'hooks': {'cronhub': {'ping_url': 'https://example.com'}}},
True,
),
(
{'hooks': None},
{'hooks': None},
False, False,
), ),
( (
{'consistency': {'checks': ['archives']}}, {'prefix': 'foo'},
{'consistency': {'checks': [{'name': 'archives'}]}}, {'prefix': 'foo'},
True,
),
(
{'consistency': {'checks': ['archives']}},
{'consistency': {'checks': [{'name': 'archives'}]}},
True,
),
(
{'consistency': None},
{'consistency': None},
False,
),
(
{'location': {'numeric_owner': False}},
{'location': {'numeric_ids': False}},
True,
),
(
{'location': {'bsd_flags': False}},
{'location': {'flags': False}},
True,
),
(
{'storage': {'remote_rate_limit': False}},
{'storage': {'upload_rate_limit': False}},
True,
),
(
{'location': {'repositories': ['foo@bar:/repo']}},
{'location': {'repositories': [{'path': 'ssh://foo@bar/repo'}]}},
True,
),
(
{'location': {'repositories': ['foo@bar:repo']}},
{'location': {'repositories': [{'path': 'ssh://foo@bar/./repo'}]}},
True,
),
(
{'location': {'repositories': ['foo@bar:~/repo']}},
{'location': {'repositories': [{'path': 'ssh://foo@bar/~/repo'}]}},
True,
),
(
{'location': {'repositories': ['ssh://foo@bar:1234/repo']}},
{'location': {'repositories': [{'path': 'ssh://foo@bar:1234/repo'}]}},
True,
),
(
{'location': {'repositories': ['file:///repo']}},
{'location': {'repositories': [{'path': '/repo'}]}},
True,
),
(
{'location': {'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}]}},
{'location': {'repositories': [{'path': 'ssh://foo@bar/repo', 'label': 'foo'}]}},
True,
),
(
{'location': {'repositories': [{'path': 'file:///repo', 'label': 'foo'}]}},
{'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}},
False,
),
(
{'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}},
{'location': {'repositories': [{'path': '/repo', 'label': 'foo'}]}},
False,
),
(
{'consistency': {'prefix': 'foo'}},
{'consistency': {'prefix': 'foo'}},
True,
),
(
{'retention': {'prefix': 'foo'}},
{'retention': {'prefix': 'foo'}},
True, True,
), ),
), ),
@ -146,6 +229,8 @@ from borgmatic.config import normalize as module
def test_normalize_applies_hard_coded_normalization_to_config( def test_normalize_applies_hard_coded_normalization_to_config(
config, expected_config, produces_logs config, expected_config, produces_logs
): ):
flexmock(module).should_receive('normalize_sections').and_return([])
logs = module.normalize('test.yaml', config) logs = module.normalize('test.yaml', config)
assert config == expected_config assert config == expected_config
@ -157,12 +242,12 @@ def test_normalize_applies_hard_coded_normalization_to_config(
def test_normalize_raises_error_if_repository_data_is_not_consistent(): def test_normalize_raises_error_if_repository_data_is_not_consistent():
flexmock(module).should_receive('normalize_sections').and_return([])
with pytest.raises(TypeError): with pytest.raises(TypeError):
module.normalize( module.normalize(
'test.yaml', 'test.yaml',
{ {
'location': { 'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}, 'file:///repo'],
'repositories': [{'path': 'foo@bar:/repo', 'label': 'foo'}, 'file:///repo']
}
}, },
) )
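
The repository cases above mostly exercise path normalization: scp-style "user@host:path" repositories become ssh:// URLs and file:// URLs become plain paths, whether given as bare strings or as {'path': ..., 'label': ...} dicts. A hedged sketch of that conversion, reconstructed from the expected values only (not borgmatic's actual code):

    import re

    def normalize_repository_path(path):
        # file:// URLs turn into local paths.
        if path.startswith('file://'):
            return path[len('file://'):]
        # Already-normalized ssh:// URLs and plain local paths pass through.
        match = re.match(r'^(?P<user_host>[^:/]+@[^:/]+):(?P<remote>.+)$', path)
        if not match:
            return path
        remote = match.group('remote')
        if remote.startswith('/'):
            suffix = remote.lstrip('/')
        elif remote.startswith('~'):
            suffix = remote
        else:
            suffix = f'./{remote}'
        return f"ssh://{match.group('user_host')}/{suffix}"

    assert normalize_repository_path('foo@bar:/repo') == 'ssh://foo@bar/repo'
    assert normalize_repository_path('foo@bar:repo') == 'ssh://foo@bar/./repo'
    assert normalize_repository_path('file:///repo') == '/repo'
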

View file

@ -32,54 +32,76 @@ def test_set_values_with_one_key_overwrites_existing_key():
def test_set_values_with_multiple_keys_creates_hierarchy(): def test_set_values_with_multiple_keys_creates_hierarchy():
config = {} config = {}
module.set_values(config, ('section', 'key'), 'value') module.set_values(config, ('option', 'suboption'), 'value')
assert config == {'section': {'key': 'value'}} assert config == {'option': {'suboption': 'value'}}
def test_set_values_with_multiple_keys_updates_hierarchy(): def test_set_values_with_multiple_keys_updates_hierarchy():
config = {'section': {'other': 'other_value'}} config = {'option': {'other': 'other_value'}}
module.set_values(config, ('section', 'key'), 'value') module.set_values(config, ('option', 'key'), 'value')
assert config == {'section': {'key': 'value', 'other': 'other_value'}} assert config == {'option': {'key': 'value', 'other': 'other_value'}}
@pytest.mark.parametrize(
'key,expected_key',
(
(('foo', 'bar'), ('foo', 'bar')),
(('location', 'foo'), ('foo',)),
(('storage', 'foo'), ('foo',)),
(('retention', 'foo'), ('foo',)),
(('consistency', 'foo'), ('foo',)),
(('output', 'foo'), ('foo',)),
(('hooks', 'foo', 'bar'), ('foo', 'bar')),
(('foo', 'hooks'), ('foo', 'hooks')),
),
)
def test_strip_section_names_passes_through_key_without_section_name(key, expected_key):
assert module.strip_section_names(key) == expected_key
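
The parametrized cases above fully determine a tiny helper: drop a leading legacy section name from an override key so that "location.repositories" and plain "repositories" address the same option. One possible implementation (an assumption, not copied from borgmatic):

    LEGACY_SECTION_NAMES = ('location', 'storage', 'retention', 'consistency', 'output', 'hooks')

    def strip_section_names(parsed_keys):
        # Drop a leading section name; everything after it is kept as-is.
        if parsed_keys[0] in LEGACY_SECTION_NAMES:
            return parsed_keys[1:]
        return parsed_keys

    assert strip_section_names(('retention', 'keep_daily')) == ('keep_daily',)
    assert strip_section_names(('keep_daily',)) == ('keep_daily',)
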
def test_parse_overrides_splits_keys_and_values(): def test_parse_overrides_splits_keys_and_values():
flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value)
flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
raw_overrides = ['section.my_option=value1', 'section.other_option=value2'] raw_overrides = ['option.my_option=value1', 'other_option=value2']
expected_result = ( expected_result = (
(('section', 'my_option'), 'value1'), (('option', 'my_option'), 'value1'),
(('section', 'other_option'), 'value2'), (('other_option'), 'value2'),
) )
assert module.parse_overrides(raw_overrides) == expected_result assert module.parse_overrides(raw_overrides) == expected_result
def test_parse_overrides_allows_value_with_equal_sign(): def test_parse_overrides_allows_value_with_equal_sign():
flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value)
flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
raw_overrides = ['section.option=this===value'] raw_overrides = ['option=this===value']
expected_result = ((('section', 'option'), 'this===value'),) expected_result = ((('option',), 'this===value'),)
assert module.parse_overrides(raw_overrides) == expected_result assert module.parse_overrides(raw_overrides) == expected_result
def test_parse_overrides_raises_on_missing_equal_sign(): def test_parse_overrides_raises_on_missing_equal_sign():
flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value)
flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
raw_overrides = ['section.option'] raw_overrides = ['option']
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.parse_overrides(raw_overrides) module.parse_overrides(raw_overrides)
def test_parse_overrides_raises_on_invalid_override_value(): def test_parse_overrides_raises_on_invalid_override_value():
flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value)
flexmock(module).should_receive('convert_value_type').and_raise(ruamel.yaml.parser.ParserError) flexmock(module).should_receive('convert_value_type').and_raise(ruamel.yaml.parser.ParserError)
raw_overrides = ['section.option=[in valid]'] raw_overrides = ['option=[in valid]']
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.parse_overrides(raw_overrides) module.parse_overrides(raw_overrides)
def test_parse_overrides_allows_value_with_single_key(): def test_parse_overrides_allows_value_with_single_key():
flexmock(module).should_receive('strip_section_names').replace_with(lambda value: value)
flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value)
raw_overrides = ['option=value'] raw_overrides = ['option=value']
expected_result = ((('option',), 'value'),) expected_result = ((('option',), 'value'),)

View file

@ -68,9 +68,9 @@ def test_apply_logical_validation_raises_if_unknown_repository_in_check_reposito
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
{ {
'location': {'repositories': ['repo.borg', 'other.borg']}, 'repositories': ['repo.borg', 'other.borg'],
'retention': {'keep_secondly': 1000}, 'keep_secondly': 1000,
'consistency': {'check_repositories': ['repo.borg', 'unknown.borg']}, 'check_repositories': ['repo.borg', 'unknown.borg'],
}, },
) )
@ -79,9 +79,9 @@ def test_apply_logical_validation_does_not_raise_if_known_repository_path_in_che
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
{ {
'location': {'repositories': [{'path': 'repo.borg'}, {'path': 'other.borg'}]}, 'repositories': [{'path': 'repo.borg'}, {'path': 'other.borg'}],
'retention': {'keep_secondly': 1000}, 'keep_secondly': 1000,
'consistency': {'check_repositories': ['repo.borg']}, 'check_repositories': ['repo.borg'],
}, },
) )
@ -90,14 +90,12 @@ def test_apply_logical_validation_does_not_raise_if_known_repository_label_in_ch
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
{ {
'location': { 'repositories': [
'repositories': [ {'path': 'repo.borg', 'label': 'my_repo'},
{'path': 'repo.borg', 'label': 'my_repo'}, {'path': 'other.borg', 'label': 'other_repo'},
{'path': 'other.borg', 'label': 'other_repo'}, ],
] 'keep_secondly': 1000,
}, 'check_repositories': ['my_repo'],
'retention': {'keep_secondly': 1000},
'consistency': {'check_repositories': ['my_repo']},
}, },
) )
@ -106,15 +104,15 @@ def test_apply_logical_validation_does_not_raise_if_archive_name_format_and_pref
module.apply_logical_validation( module.apply_logical_validation(
'config.yaml', 'config.yaml',
{ {
'storage': {'archive_name_format': '{hostname}-{now}'}, # noqa: FS003 'archive_name_format': '{hostname}-{now}', # noqa: FS003
'retention': {'prefix': '{hostname}-'}, # noqa: FS003 'prefix': '{hostname}-', # noqa: FS003
'consistency': {'prefix': '{hostname}-'}, # noqa: FS003 'prefix': '{hostname}-', # noqa: FS003
}, },
) )
def test_apply_logical_validation_does_not_raise_otherwise(): def test_apply_logical_validation_does_not_raise_otherwise():
module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}}) module.apply_logical_validation('config.yaml', {'keep_secondly': 1000})
def test_normalize_repository_path_passes_through_remote_repository(): def test_normalize_repository_path_passes_through_remote_repository():
@ -157,22 +155,20 @@ def test_guard_configuration_contains_repository_does_not_raise_when_repository_
) )
module.guard_configuration_contains_repository( module.guard_configuration_contains_repository(
repository='repo', configurations={'config.yaml': {'location': {'repositories': ['repo']}}} repository='repo', configurations={'config.yaml': {'repositories': ['repo']}}
) )
def test_guard_configuration_contains_repository_does_not_raise_when_repository_label_in_config(): def test_guard_configuration_contains_repository_does_not_raise_when_repository_label_in_config():
module.guard_configuration_contains_repository( module.guard_configuration_contains_repository(
repository='repo', repository='repo',
configurations={ configurations={'config.yaml': {'repositories': [{'path': 'foo/bar', 'label': 'repo'}]}},
'config.yaml': {'location': {'repositories': [{'path': 'foo/bar', 'label': 'repo'}]}}
},
) )
def test_guard_configuration_contains_repository_does_not_raise_when_repository_not_given(): def test_guard_configuration_contains_repository_does_not_raise_when_repository_not_given():
module.guard_configuration_contains_repository( module.guard_configuration_contains_repository(
repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo']}}} repository=None, configurations={'config.yaml': {'repositories': ['repo']}}
) )
@ -184,7 +180,7 @@ def test_guard_configuration_contains_repository_errors_when_repository_missing_
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.guard_configuration_contains_repository( module.guard_configuration_contains_repository(
repository='nope', repository='nope',
configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, configurations={'config.yaml': {'repositories': ['repo', 'repo2']}},
) )
@ -197,8 +193,8 @@ def test_guard_configuration_contains_repository_errors_when_repository_matches_
module.guard_configuration_contains_repository( module.guard_configuration_contains_repository(
repository='repo', repository='repo',
configurations={ configurations={
'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}, 'config.yaml': {'repositories': ['repo', 'repo2']},
'other.yaml': {'location': {'repositories': ['repo']}}, 'other.yaml': {'repositories': ['repo']},
}, },
) )
@ -207,26 +203,26 @@ def test_guard_single_repository_selected_raises_when_multiple_repositories_conf
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.guard_single_repository_selected( module.guard_single_repository_selected(
repository=None, repository=None,
configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, configurations={'config.yaml': {'repositories': ['repo', 'repo2']}},
) )
def test_guard_single_repository_selected_does_not_raise_when_single_repository_configured_and_none_selected(): def test_guard_single_repository_selected_does_not_raise_when_single_repository_configured_and_none_selected():
module.guard_single_repository_selected( module.guard_single_repository_selected(
repository=None, repository=None,
configurations={'config.yaml': {'location': {'repositories': ['repo']}}}, configurations={'config.yaml': {'repositories': ['repo']}},
) )
def test_guard_single_repository_selected_does_not_raise_when_no_repositories_configured_and_one_selected(): def test_guard_single_repository_selected_does_not_raise_when_no_repositories_configured_and_one_selected():
module.guard_single_repository_selected( module.guard_single_repository_selected(
repository='repo', repository='repo',
configurations={'config.yaml': {'location': {'repositories': []}}}, configurations={'config.yaml': {'repositories': []}},
) )
def test_guard_single_repository_selected_does_not_raise_when_repositories_configured_and_one_selected(): def test_guard_single_repository_selected_does_not_raise_when_repositories_configured_and_one_selected():
module.guard_single_repository_selected( module.guard_single_repository_selected(
repository='repo', repository='repo',
configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, configurations={'config.yaml': {'repositories': ['repo', 'repo2']}},
) )
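
A hedged sketch of the single-repository guard exercised just above, with the behavior inferred from the test names (the real borgmatic error message differs): when no repository is selected on the command line, exactly one configured repository must exist across all configuration files. Note that with sections removed, 'repositories' is read straight from the top level of each config:

    def guard_single_repository_selected(repository, configurations):
        if repository:
            return
        count = sum(len(config.get('repositories', [])) for config in configurations.values())
        if count != 1:
            raise ValueError(
                "Can't determine which repository to use. Use --repository to disambiguate."
            )
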

View file

@ -11,6 +11,7 @@ def test_ping_monitor_rewrites_ping_url_for_start_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -26,6 +27,7 @@ def test_ping_monitor_rewrites_ping_url_and_state_for_start_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -41,6 +43,7 @@ def test_ping_monitor_rewrites_ping_url_for_finish_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.FINISH, module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -55,7 +58,12 @@ def test_ping_monitor_rewrites_ping_url_for_fail_state():
).and_return(flexmock(ok=True)) ).and_return(flexmock(ok=True))
module.ping_monitor( module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False hook_config,
{},
'config.yaml',
module.monitor.State.FAIL,
monitoring_log_level=1,
dry_run=False,
) )
@ -64,7 +72,12 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url():
flexmock(module.requests).should_receive('get').never() flexmock(module.requests).should_receive('get').never()
module.ping_monitor( module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True hook_config,
{},
'config.yaml',
module.monitor.State.START,
monitoring_log_level=1,
dry_run=True,
) )
@ -77,6 +90,7 @@ def test_ping_monitor_with_connection_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
(),
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -97,6 +111,7 @@ def test_ping_monitor_with_other_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -104,11 +119,13 @@ def test_ping_monitor_with_other_error_logs_warning():
) )
def test_ping_monitor_with_unsupported_monitoring_state(): def test_ping_monitor_with_unsupported_monitoring_state_bails():
hook_config = {'ping_url': 'https://example.com'} hook_config = {'ping_url': 'https://example.com'}
flexmock(module.requests).should_receive('get').never() flexmock(module.requests).should_receive('get').never()
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.LOG, module.monitor.State.LOG,
monitoring_log_level=1, monitoring_log_level=1,

View file

@ -11,6 +11,7 @@ def test_ping_monitor_hits_ping_url_for_start_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -26,6 +27,7 @@ def test_ping_monitor_hits_ping_url_for_finish_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.FINISH, module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -40,7 +42,12 @@ def test_ping_monitor_hits_ping_url_for_fail_state():
).and_return(flexmock(ok=True)) ).and_return(flexmock(ok=True))
module.ping_monitor( module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False hook_config,
{},
'config.yaml',
module.monitor.State.FAIL,
monitoring_log_level=1,
dry_run=False,
) )
@ -49,7 +56,12 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url():
flexmock(module.requests).should_receive('get').never() flexmock(module.requests).should_receive('get').never()
module.ping_monitor( module.ping_monitor(
hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True hook_config,
{},
'config.yaml',
module.monitor.State.START,
monitoring_log_level=1,
dry_run=True,
) )
@ -62,6 +74,7 @@ def test_ping_monitor_with_connection_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -82,6 +95,7 @@ def test_ping_monitor_with_other_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.START, module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -89,11 +103,13 @@ def test_ping_monitor_with_other_error_logs_warning():
) )
def test_ping_monitor_with_unsupported_monitoring_state(): def test_ping_monitor_with_unsupported_monitoring_state_bails():
hook_config = {'ping_url': 'https://example.com'} hook_config = {'ping_url': 'https://example.com'}
flexmock(module.requests).should_receive('get').never() flexmock(module.requests).should_receive('get').never()
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
module.monitor.State.LOG, module.monitor.State.LOG,
monitoring_log_level=1, monitoring_log_level=1,
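
Every monitoring-hook test in these files gains the same extra "{}" argument: ping_monitor now receives the whole configuration dict in addition to its own hook_config. A stub showing the assumed new parameter order (names inferred from the calls above; the body is illustrative, not the real hook):

    import logging

    logger = logging.getLogger(__name__)

    def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
        # Only the signature matters for this commit: "config" is threaded through
        # as the second argument so hooks can read global options directly.
        if dry_run:
            logger.info(f'{config_filename}: Dry run; not pinging')
            return
        logger.log(monitoring_log_level, f'{config_filename}: Pinging for state {state}')
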

View file

@ -6,7 +6,7 @@ from flexmock import flexmock
from borgmatic.hooks import dispatch as module from borgmatic.hooks import dispatch as module
def hook_function(config, log_prefix, thing, value): def hook_function(hook_config, config, log_prefix, thing, value):
''' '''
This test function gets mocked out below. This test function gets mocked out below.
''' '''
@ -14,98 +14,104 @@ def hook_function(config, log_prefix, thing, value):
def test_call_hook_invokes_module_function_with_arguments_and_returns_value(): def test_call_hook_invokes_module_function_with_arguments_and_returns_value():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} config = {'super_hook': flexmock(), 'other_hook': flexmock()}
expected_return_value = flexmock() expected_return_value = flexmock()
test_module = sys.modules[__name__] test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
flexmock(test_module).should_receive('hook_function').with_args( flexmock(test_module).should_receive('hook_function').with_args(
hooks['super_hook'], 'prefix', 55, value=66 config['super_hook'], config, 'prefix', 55, value=66
).and_return(expected_return_value).once() ).and_return(expected_return_value).once()
return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) return_value = module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66)
assert return_value == expected_return_value assert return_value == expected_return_value
def test_call_hook_without_hook_config_invokes_module_function_with_arguments_and_returns_value(): def test_call_hook_without_hook_config_invokes_module_function_with_arguments_and_returns_value():
hooks = {'other_hook': flexmock()} config = {'other_hook': flexmock()}
expected_return_value = flexmock() expected_return_value = flexmock()
test_module = sys.modules[__name__] test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
flexmock(test_module).should_receive('hook_function').with_args( flexmock(test_module).should_receive('hook_function').with_args(
{}, 'prefix', 55, value=66 {}, config, 'prefix', 55, value=66
).and_return(expected_return_value).once() ).and_return(expected_return_value).once()
return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) return_value = module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66)
assert return_value == expected_return_value assert return_value == expected_return_value
def test_call_hook_without_corresponding_module_raises(): def test_call_hook_without_corresponding_module_raises():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} config = {'super_hook': flexmock(), 'other_hook': flexmock()}
test_module = sys.modules[__name__] test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module} flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module}
flexmock(test_module).should_receive('hook_function').never() flexmock(test_module).should_receive('hook_function').never()
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) module.call_hook('hook_function', config, 'prefix', 'super_hook', 55, value=66)
def test_call_hooks_calls_each_hook_and_collects_return_values(): def test_call_hooks_calls_each_hook_and_collects_return_values():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} config = {'super_hook': flexmock(), 'other_hook': flexmock()}
expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return( flexmock(module).should_receive('call_hook').and_return(
expected_return_values['super_hook'] expected_return_values['super_hook']
).and_return(expected_return_values['other_hook']) ).and_return(expected_return_values['other_hook'])
return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) return_values = module.call_hooks(
'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
)
assert return_values == expected_return_values assert return_values == expected_return_values
def test_call_hooks_calls_skips_return_values_for_missing_hooks(): def test_call_hooks_calls_skips_return_values_for_missing_hooks():
hooks = {'super_hook': flexmock()} config = {'super_hook': flexmock()}
expected_return_values = {'super_hook': flexmock()} expected_return_values = {'super_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook'])
return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) return_values = module.call_hooks(
'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
)
assert return_values == expected_return_values assert return_values == expected_return_values
def test_call_hooks_calls_skips_return_values_for_null_hooks(): def test_call_hooks_calls_skips_return_values_for_null_hooks():
hooks = {'super_hook': flexmock(), 'other_hook': None} config = {'super_hook': flexmock(), 'other_hook': None}
expected_return_values = {'super_hook': flexmock()} expected_return_values = {'super_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook'])
return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) return_values = module.call_hooks(
'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
)
assert return_values == expected_return_values assert return_values == expected_return_values
def test_call_hooks_even_if_unconfigured_calls_each_hook_and_collects_return_values(): def test_call_hooks_even_if_unconfigured_calls_each_hook_and_collects_return_values():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} config = {'super_hook': flexmock(), 'other_hook': flexmock()}
expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return( flexmock(module).should_receive('call_hook').and_return(
expected_return_values['super_hook'] expected_return_values['super_hook']
).and_return(expected_return_values['other_hook']) ).and_return(expected_return_values['other_hook'])
return_values = module.call_hooks_even_if_unconfigured( return_values = module.call_hooks_even_if_unconfigured(
'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
) )
assert return_values == expected_return_values assert return_values == expected_return_values
def test_call_hooks_even_if_unconfigured_calls_each_hook_configured_or_not_and_collects_return_values(): def test_call_hooks_even_if_unconfigured_calls_each_hook_configured_or_not_and_collects_return_values():
hooks = {'other_hook': flexmock()} config = {'other_hook': flexmock()}
expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return( flexmock(module).should_receive('call_hook').and_return(
expected_return_values['super_hook'] expected_return_values['super_hook']
).and_return(expected_return_values['other_hook']) ).and_return(expected_return_values['other_hook'])
return_values = module.call_hooks_even_if_unconfigured( return_values = module.call_hooks_even_if_unconfigured(
'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 'do_stuff', config, 'prefix', ('super_hook', 'other_hook'), 55
) )
assert return_values == expected_return_values assert return_values == expected_return_values
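
The dispatch changes above boil down to one convention: a hook function's first parameter is its own hook configuration and its second is the whole configuration dict. A rough sketch of a dispatcher honoring that convention (HOOK_NAME_TO_MODULE comes from the tests; the body and error wording are assumptions, not borgmatic's exact code):

    HOOK_NAME_TO_MODULE = {}  # name -> module implementing the hook functions

    def call_hook(function_name, config, log_prefix, hook_name, *args, **kwargs):
        hook_config = config.get(hook_name) or {}
        try:
            module = HOOK_NAME_TO_MODULE[hook_name]
        except KeyError:
            raise ValueError(f'Unknown hook name: {hook_name}')
        # Hook config first, whole config second, then the shared log prefix.
        return getattr(module, function_name)(hook_config, config, log_prefix, *args, **kwargs)
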

View file

@ -143,6 +143,7 @@ def test_ping_monitor_hits_ping_url_for_start_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -160,6 +161,7 @@ def test_ping_monitor_hits_ping_url_for_finish_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.FINISH, state=module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -177,6 +179,7 @@ def test_ping_monitor_hits_ping_url_for_fail_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.FAIL, state=module.monitor.State.FAIL,
monitoring_log_level=1, monitoring_log_level=1,
@ -194,6 +197,7 @@ def test_ping_monitor_hits_ping_url_for_log_state():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.LOG, state=module.monitor.State.LOG,
monitoring_log_level=1, monitoring_log_level=1,
@ -213,6 +217,7 @@ def test_ping_monitor_with_ping_uuid_hits_corresponding_url():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.FINISH, state=module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -230,6 +235,7 @@ def test_ping_monitor_skips_ssl_verification_when_verify_tls_false():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.FINISH, state=module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -247,6 +253,7 @@ def test_ping_monitor_executes_ssl_verification_when_verify_tls_true():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.FINISH, state=module.monitor.State.FINISH,
monitoring_log_level=1, monitoring_log_level=1,
@ -261,6 +268,7 @@ def test_ping_monitor_dry_run_does_not_hit_ping_url():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -275,6 +283,7 @@ def test_ping_monitor_does_not_hit_ping_url_when_states_not_matching():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -291,6 +300,7 @@ def test_ping_monitor_hits_ping_url_when_states_matching():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -308,6 +318,7 @@ def test_ping_monitor_with_connection_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,
@ -329,6 +340,7 @@ def test_ping_monitor_with_other_error_logs_warning():
module.ping_monitor( module.ping_monitor(
hook_config, hook_config,
{},
'config.yaml', 'config.yaml',
state=module.monitor.State.START, state=module.monitor.State.START,
monitoring_log_level=1, monitoring_log_level=1,

View file

@ -22,7 +22,7 @@ def test_dump_databases_runs_mongodump_for_each_database():
run_to_completion=False, run_to_completion=False,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_with_dry_run_skips_mongodump(): def test_dump_databases_with_dry_run_skips_mongodump():
@ -34,7 +34,7 @@ def test_dump_databases_with_dry_run_skips_mongodump():
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never() flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
def test_dump_databases_runs_mongodump_with_hostname_and_port(): def test_dump_databases_runs_mongodump_with_hostname_and_port():
@ -63,7 +63,7 @@ def test_dump_databases_runs_mongodump_with_hostname_and_port():
run_to_completion=False, run_to_completion=False,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_username_and_password(): def test_dump_databases_runs_mongodump_with_username_and_password():
@ -101,7 +101,7 @@ def test_dump_databases_runs_mongodump_with_username_and_password():
run_to_completion=False, run_to_completion=False,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodump_with_directory_format(): def test_dump_databases_runs_mongodump_with_directory_format():
@ -118,7 +118,7 @@ def test_dump_databases_runs_mongodump_with_directory_format():
shell=True, shell=True,
).and_return(flexmock()).once() ).and_return(flexmock()).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == []
def test_dump_databases_runs_mongodump_with_options(): def test_dump_databases_runs_mongodump_with_options():
@ -136,7 +136,7 @@ def test_dump_databases_runs_mongodump_with_options():
run_to_completion=False, run_to_completion=False,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_dump_databases_runs_mongodumpall_for_all_databases(): def test_dump_databases_runs_mongodumpall_for_all_databases():
@ -154,7 +154,7 @@ def test_dump_databases_runs_mongodumpall_for_all_databases():
run_to_completion=False, run_to_completion=False,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
def test_restore_database_dump_runs_mongorestore(): def test_restore_database_dump_runs_mongorestore():
@ -172,8 +172,8 @@ def test_restore_database_dump_runs_mongorestore():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -196,8 +196,8 @@ def test_restore_database_dump_errors_on_multiple_database_config():
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=flexmock(), extract_process=flexmock(),
connection_params={ connection_params={
@ -236,8 +236,8 @@ def test_restore_database_dump_runs_mongorestore_with_hostname_and_port():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -284,8 +284,8 @@ def test_restore_database_dump_runs_mongorestore_with_username_and_password():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -340,8 +340,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -396,8 +396,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -424,8 +424,8 @@ def test_restore_database_dump_runs_mongorestore_with_options():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -462,8 +462,8 @@ def test_restore_databases_dump_runs_mongorestore_with_schemas():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -490,8 +490,8 @@ def test_restore_database_dump_runs_psql_for_all_database_dump():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=extract_process, extract_process=extract_process,
connection_params={ connection_params={
@ -512,8 +512,8 @@ def test_restore_database_dump_with_dry_run_skips_restore():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=True, dry_run=True,
extract_process=flexmock(), extract_process=flexmock(),
connection_params={ connection_params={
@ -539,8 +539,8 @@ def test_restore_database_dump_without_extract_process_restores_from_disk():
module.restore_database_dump( module.restore_database_dump(
database_config, database_config,
'test.yaml',
{}, {},
'test.yaml',
dry_run=False, dry_run=False,
extract_process=None, extract_process=None,
connection_params={ connection_params={
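
The MongoDB hunks above (and the MySQL ones that follow) apply the same reordering: each database hook now takes its own list of databases first, the whole configuration second, and the log prefix third. A stub illustrating the assumed order; the real hooks build and run mongodump/mysqldump commands:

    def dump_databases(databases, config, log_prefix, dry_run):
        # Stub: report what would be dumped and return per-database "processes".
        label = ' (dry run; not actually dumping anything)' if dry_run else ''
        print(f'{log_prefix}: Dumping {len(databases)} database(s){label}')
        return [] if dry_run else list(databases)

    dump_databases([{'name': 'test'}], {}, 'test.yaml', dry_run=False)
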

View file

@ -63,7 +63,7 @@ def test_dump_databases_dumps_each_database():
dry_run_label=object, dry_run_label=object,
).and_return(process).once() ).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
def test_dump_databases_dumps_with_password(): def test_dump_databases_dumps_with_password():
@@ -84,7 +84,7 @@ def test_dump_databases_dumps_with_password():
         dry_run_label=object,
     ).and_return(process).once()
-    assert module.dump_databases([database], 'test.yaml', {}, dry_run=False) == [process]
+    assert module.dump_databases([database], {}, 'test.yaml', dry_run=False) == [process]
 def test_dump_databases_dumps_all_databases_at_once():
@@ -102,7 +102,7 @@ def test_dump_databases_dumps_all_databases_at_once():
         dry_run_label=object,
     ).and_return(process).once()
-    assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
+    assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == [process]
 def test_dump_databases_dumps_all_databases_separately_when_format_configured():
@@ -122,7 +122,7 @@ def test_dump_databases_dumps_all_databases_separately_when_format_configured():
         dry_run_label=object,
     ).and_return(process).once()
-    assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes
+    assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False) == processes
 def test_database_names_to_dump_runs_mysql_with_list_options():
@@ -365,7 +365,7 @@ def test_dump_databases_errors_for_missing_all_databases():
     flexmock(module).should_receive('database_names_to_dump').and_return(())
     with pytest.raises(ValueError):
-        assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
+        assert module.dump_databases(databases, {}, 'test.yaml', dry_run=False)
 def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run():
@@ -376,7 +376,7 @@ def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run():
     )
     flexmock(module).should_receive('database_names_to_dump').and_return(())
-    assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == []
+    assert module.dump_databases(databases, {}, 'test.yaml', dry_run=True) == []
 def test_restore_database_dump_runs_mysql_to_restore():
@@ -393,8 +393,8 @@ def test_restore_database_dump_runs_mysql_to_restore():
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -415,8 +415,8 @@ def test_restore_database_dump_errors_on_multiple_database_config():
     with pytest.raises(ValueError):
         module.restore_database_dump(
             database_config,
-            'test.yaml',
             {},
+            'test.yaml',
             dry_run=False,
             extract_process=flexmock(),
             connection_params={
@@ -442,8 +442,8 @@ def test_restore_database_dump_runs_mysql_with_options():
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -478,8 +478,8 @@ def test_restore_database_dump_runs_mysql_with_hostname_and_port():
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -505,8 +505,8 @@ def test_restore_database_dump_runs_mysql_with_username_and_password():
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -553,8 +553,8 @@ def test_restore_database_dump_with_connection_params_uses_connection_params_for
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -603,8 +603,8 @@ def test_restore_database_dump_without_connection_params_uses_restore_params_in_
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=False,
         extract_process=extract_process,
         connection_params={
@@ -623,8 +623,8 @@ def test_restore_database_dump_with_dry_run_skips_restore():
     module.restore_database_dump(
         database_config,
-        'test.yaml',
         {},
+        'test.yaml',
         dry_run=True,
         extract_process=flexmock(),
         connection_params={
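The dump_databases and restore_database_dump hunks above all make the same mechanical change: the consolidated configuration dict is now passed before the 'test.yaml' log prefix rather than after it. A minimal, runnable sketch of that argument order using a toy stand-in rather than borgmatic's real hook (the function body and parameter names here are assumptions for illustration only):

def dump_databases(databases, config, log_prefix, dry_run=False):
    # Toy stand-in: only demonstrates the (databases, config, log_prefix) ordering.
    return [f'{log_prefix}: would dump {database["name"]}' for database in databases]

print(dump_databases([{'name': 'posts'}], {}, 'test.yaml', dry_run=False))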

View file

@@ -44,6 +44,7 @@ def test_ping_monitor_minimal_config_hits_hosted_ntfy_on_fail():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -65,6 +66,7 @@ def test_ping_monitor_with_auth_hits_hosted_ntfy_on_fail():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -83,6 +85,7 @@ def test_ping_monitor_auth_with_no_username_warning():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -101,6 +104,7 @@ def test_ping_monitor_auth_with_no_password_warning():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -114,6 +118,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_start():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.START,
         monitoring_log_level=1,
@@ -127,6 +132,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_finish():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FINISH,
         monitoring_log_level=1,
@@ -144,6 +150,7 @@ def test_ping_monitor_minimal_config_hits_selfhosted_ntfy_on_fail():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -157,6 +164,7 @@ def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_fail_dry_run():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -172,6 +180,7 @@ def test_ping_monitor_custom_message_hits_hosted_ntfy_on_fail():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -189,6 +198,7 @@ def test_ping_monitor_custom_state_hits_hosted_ntfy_on_start():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.START,
         monitoring_log_level=1,
@@ -207,6 +217,7 @@ def test_ping_monitor_with_connection_error_logs_warning():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
@@ -229,6 +240,7 @@ def test_ping_monitor_with_other_error_logs_warning():
     module.ping_monitor(
         hook_config,
+        {},
         'config.yaml',
         borgmatic.hooks.monitor.State.FAIL,
         monitoring_log_level=1,
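Each ping_monitor hunk above inserts a new second positional argument, an (empty, in these tests) dict that presumably carries the consolidated configuration, ahead of the 'config.yaml' filename. A runnable toy stand-in sketching that calling convention (parameter names and body are assumptions, not the hook's actual signature):

def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level=1, dry_run=False):
    # Toy stand-in: only demonstrates where the new config dict sits in the call.
    return (hook_config.get('topic'), config_filename, state, dry_run)

print(ping_monitor({'topic': 'backups'}, {}, 'config.yaml', 'FAIL'))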

Some files were not shown because too many files have changed in this diff.