Optionally dump "all" PostgreSQL databases to separate files instead of one combined dump file (#438, #560).

Dan Helfman 2023-01-25 23:31:07 -08:00
parent 22c750b949
commit 0e6b2c6773
46 changed files with 2697 additions and 990 deletions
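To illustrate the feature being added (a hedged configuration sketch — the "format"/"all" semantics follow the schema change below; hostnames and values are placeholders):

hooks:
    postgresql_databases:
        # With the special name "all" plus a "format", each database found on
        # the host is dumped to a separate file of that format. Leaving
        # "format" blank with "all" keeps the previous single combined dump.
        - name: all
          format: custom
          hostname: postgresql.example.org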

2
NEWS

@@ -3,7 +3,7 @@
dump file, allowing more convenient restores of individual databases. You can enable this by
specifying the database dump "format" option when the database is named "all".
* #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout.
* #622: Fix traceback when include merging on ARM64.
* #622: Fix traceback when include merging configuration files on ARM64.
* #629: Skip warning about excluded special files when no special files have been excluded.
1.7.5

0
borgmatic/actions/__init__.py Normal file

36
borgmatic/actions/borg.py Normal file

@@ -0,0 +1,36 @@
import logging
import borgmatic.borg.borg
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_borg(
repository, storage, local_borg_version, borg_arguments, local_path, remote_path,
):
'''
Run the "borg" action for the given repository.
'''
if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, borg_arguments.repository
):
logger.info('{}: Running arbitrary Borg command'.format(repository))
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository,
borg_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
borgmatic.borg.borg.run_arbitrary_borg(
repository,
storage,
local_borg_version,
options=borg_arguments.options,
archive=archive_name,
local_path=local_path,
remote_path=remote_path,
)

21
borgmatic/actions/break_lock.py Normal file

@@ -0,0 +1,21 @@
import logging
import borgmatic.borg.break_lock
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_break_lock(
repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path,
):
'''
Run the "break-lock" action for the given repository.
'''
if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, break_lock_arguments.repository
):
logger.info(f'{repository}: Breaking repository and cache locks')
borgmatic.borg.break_lock.break_lock(
repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path,
)

55
borgmatic/actions/check.py Normal file

@@ -0,0 +1,55 @@
import logging
import borgmatic.borg.check
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_check(
config_filename,
repository,
location,
storage,
consistency,
hooks,
hook_context,
local_borg_version,
check_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "check" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_check'),
hooks.get('umask'),
config_filename,
'pre-check',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Running consistency checks'.format(repository))
borgmatic.borg.check.check_archives(
repository,
location,
storage,
consistency,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=check_arguments.progress,
repair=check_arguments.repair,
only_checks=check_arguments.only,
force=check_arguments.force,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_check'),
hooks.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,
**hook_context,
)

57
borgmatic/actions/compact.py Normal file

@@ -0,0 +1,57 @@
import logging
import borgmatic.borg.compact
import borgmatic.borg.feature
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_compact(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
compact_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "compact" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_compact'),
hooks.get('umask'),
config_filename,
'pre-compact',
global_arguments.dry_run,
**hook_context,
)
if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
borgmatic.borg.compact.compact_segments(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=compact_arguments.progress,
cleanup_commits=compact_arguments.cleanup_commits,
threshold=compact_arguments.threshold,
)
else: # pragma: nocover
logger.info('{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository))
borgmatic.hooks.command.execute_hook(
hooks.get('after_compact'),
hooks.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,
**hook_context,
)

90
borgmatic/actions/create.py Normal file

@@ -0,0 +1,90 @@
import json
import logging
import borgmatic.borg.create
import borgmatic.hooks.command
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
def run_create(
config_filename,
repository,
location,
storage,
hooks,
hook_context,
local_borg_version,
create_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "create" action for the given repository.
If create_arguments.json is True, yield the JSON output from creating the archive.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
config_filename,
'pre-backup',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
active_dumps = borgmatic.hooks.dispatch.call_hooks(
'dump_databases',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borgmatic.borg.create.create_archive(
global_arguments.dry_run,
repository,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=create_arguments.progress,
stats=create_arguments.stats,
json=create_arguments.json,
list_files=create_arguments.list_files,
stream_processes=stream_processes,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config_filename,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_backup'),
hooks.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,
**hook_context,
)

48
borgmatic/actions/export_tar.py Normal file

@@ -0,0 +1,48 @@
import logging
import borgmatic.borg.export_tar
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_export_tar(
repository,
storage,
local_borg_version,
export_tar_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "export-tar" action for the given repository.
'''
if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, export_tar_arguments.repository
):
logger.info(
'{}: Exporting archive {} as tar file'.format(repository, export_tar_arguments.archive)
)
borgmatic.borg.export_tar.export_tar_archive(
global_arguments.dry_run,
repository,
borgmatic.borg.rlist.resolve_archive_name(
repository,
export_tar_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
export_tar_arguments.paths,
export_tar_arguments.destination,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
tar_filter=export_tar_arguments.tar_filter,
list_files=export_tar_arguments.list_files,
strip_components=export_tar_arguments.strip_components,
)

67
borgmatic/actions/extract.py Normal file

@@ -0,0 +1,67 @@
import logging
import borgmatic.borg.extract
import borgmatic.borg.rlist
import borgmatic.config.validate
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_extract(
config_filename,
repository,
location,
storage,
hooks,
hook_context,
local_borg_version,
extract_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "extract" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_extract'),
hooks.get('umask'),
config_filename,
'pre-extract',
global_arguments.dry_run,
**hook_context,
)
if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, extract_arguments.repository
):
logger.info('{}: Extracting archive {}'.format(repository, extract_arguments.archive))
borgmatic.borg.extract.extract_archive(
global_arguments.dry_run,
repository,
borgmatic.borg.rlist.resolve_archive_name(
repository,
extract_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
extract_arguments.paths,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path=extract_arguments.destination,
strip_components=extract_arguments.strip_components,
progress=extract_arguments.progress,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_extract'),
hooks.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,
**hook_context,
)

41
borgmatic/actions/info.py Normal file

@@ -0,0 +1,41 @@
import json
import logging
import borgmatic.borg.info
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_info(
repository, storage, local_borg_version, info_arguments, local_path, remote_path,
):
'''
Run the "info" action for the given repository and archive.
If info_arguments.json is True, yield the JSON output from the info for the archive.
'''
if info_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, info_arguments.repository
):
if not info_arguments.json: # pragma: nocover
logger.answer(f'{repository}: Displaying archive summary information')
info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
repository,
info_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borgmatic.borg.info.display_archives_info(
repository,
storage,
local_borg_version,
info_arguments=info_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)

43
borgmatic/actions/list.py Normal file

@@ -0,0 +1,43 @@
import json
import logging
import borgmatic.borg.list
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_list(
repository, storage, local_borg_version, list_arguments, local_path, remote_path,
):
'''
Run the "list" action for the given repository and archive.
If list_arguments.json is True, yield the JSON output from listing the archive.
'''
if list_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, list_arguments.repository
):
if not list_arguments.json: # pragma: nocover
if list_arguments.find_paths:
logger.answer(f'{repository}: Searching archives')
elif not list_arguments.archive:
logger.answer(f'{repository}: Listing archives')
list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name(
repository,
list_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
)
json_output = borgmatic.borg.list.list_archive(
repository,
storage,
local_borg_version,
list_arguments=list_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)

42
borgmatic/actions/mount.py Normal file

@@ -0,0 +1,42 @@
import logging
import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_mount(
repository, storage, local_borg_version, mount_arguments, local_path, remote_path,
):
'''
Run the "mount" action for the given repository.
'''
if mount_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, mount_arguments.repository
):
if mount_arguments.archive:
logger.info('{}: Mounting archive {}'.format(repository, mount_arguments.archive))
else: # pragma: nocover
logger.info('{}: Mounting repository'.format(repository))
borgmatic.borg.mount.mount_archive(
repository,
borgmatic.borg.rlist.resolve_archive_name(
repository,
mount_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
),
mount_arguments.mount_point,
mount_arguments.paths,
mount_arguments.foreground,
mount_arguments.options,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)

53
borgmatic/actions/prune.py Normal file

@@ -0,0 +1,53 @@
import logging
import borgmatic.borg.prune
import borgmatic.hooks.command
logger = logging.getLogger(__name__)
def run_prune(
config_filename,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
prune_arguments,
global_arguments,
dry_run_label,
local_path,
remote_path,
):
'''
Run the "prune" action for the given repository.
'''
borgmatic.hooks.command.execute_hook(
hooks.get('before_prune'),
hooks.get('umask'),
config_filename,
'pre-prune',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
borgmatic.borg.prune.prune_archives(
global_arguments.dry_run,
repository,
storage,
retention,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
stats=prune_arguments.stats,
list_archives=prune_arguments.list_archives,
)
borgmatic.hooks.command.execute_hook(
hooks.get('after_prune'),
hooks.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,
**hook_context,
)

34
borgmatic/actions/rcreate.py Normal file

@@ -0,0 +1,34 @@
import logging
import borgmatic.borg.rcreate
logger = logging.getLogger(__name__)
def run_rcreate(
repository,
storage,
local_borg_version,
rcreate_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "rcreate" action for the given repository.
'''
logger.info('{}: Creating repository'.format(repository))
borgmatic.borg.rcreate.create_repository(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
rcreate_arguments.encryption_mode,
rcreate_arguments.source_repository,
rcreate_arguments.copy_crypt_key,
rcreate_arguments.append_only,
rcreate_arguments.storage_quota,
rcreate_arguments.make_parent_dirs,
local_path=local_path,
remote_path=remote_path,
)

345
borgmatic/actions/restore.py Normal file

@@ -0,0 +1,345 @@
import copy
import logging
import os
import borgmatic.borg.extract
import borgmatic.borg.list
import borgmatic.borg.mount
import borgmatic.borg.rlist
import borgmatic.borg.state
import borgmatic.config.validate
import borgmatic.hooks.dispatch
import borgmatic.hooks.dump
logger = logging.getLogger(__name__)
UNSPECIFIED_HOOK = object()
def get_configured_database(
hooks, archive_database_names, hook_name, database_name, configuration_database_name=None
):
'''
Find the first database with the given hook name and database name in the configured hooks
dict and the given archive database names dict (from hook name to database names contained in
a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database
hooks for the named database. If a configuration database name is given, use that instead of the
database name to lookup the database in the given hooks configuration.
Return the found database as a tuple of (found hook name, database configuration dict).
'''
if not configuration_database_name:
configuration_database_name = database_name
if hook_name == UNSPECIFIED_HOOK:
hooks_to_search = hooks
else:
hooks_to_search = {hook_name: hooks[hook_name]}
return next(
(
(name, hook_database)
for (name, hook) in hooks_to_search.items()
for hook_database in hook
if hook_database['name'] == configuration_database_name
and database_name in archive_database_names.get(name, [])
),
(None, None),
)
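To illustrate the lookup semantics (hypothetical dicts; a database is only found if it is both configured and present in the archive):

# Hypothetical hooks configuration and archive contents.
hooks = {'postgresql_databases': [{'name': 'users'}]}
archive_database_names = {'postgresql_databases': ['users']}
get_configured_database(hooks, archive_database_names, UNSPECIFIED_HOOK, 'users')
# -> ('postgresql_databases', {'name': 'users'})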
def get_configured_hook_name_and_database(hooks, database_name):
'''
Find the hook name and first database dict with the given database name in the configured hooks
dict. This searches across all database hooks.
'''
def restore_single_database(
repository,
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
hook_name,
database,
): # pragma: no cover
'''
Given (among other things) an archive name, a database hook name, and a configured database
configuration dict, restore that database from the archive.
'''
logger.info(f'{repository}: Restoring database {database["name"]}')
dump_pattern = borgmatic.hooks.dispatch.call_hooks(
'make_database_dump_pattern',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
database['name'],
)[hook_name]
# Kick off a single database extract to stdout.
extract_process = borgmatic.borg.extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository,
archive=archive_name,
paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
location_config=location,
storage_config=storage,
local_borg_version=local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path='/',
# A directory format dump isn't a single file, and therefore can't extract
# to stdout. In this case, the extract_process return value is None.
extract_to_stdout=bool(database.get('format') != 'directory'),
)
# Run a single database restore, consuming the extract stdout (if any).
borgmatic.hooks.dispatch.call_hooks(
'restore_database_dump',
{hook_name: [database]},
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
extract_process,
)
def collect_archive_database_names(
repository, archive, location, storage, local_borg_version, local_path, remote_path,
):
'''
Given a local or remote repository path, a resolved archive name, a location configuration dict,
a storage configuration dict, the local Borg version, and local and remote Borg paths, query the
archive for the names of databases it contains and return them as a dict from hook name to a
sequence of database names.
'''
borgmatic_source_directory = os.path.expanduser(
location.get(
'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
)
).lstrip('/')
parent_dump_path = os.path.expanduser(
borgmatic.hooks.dump.make_database_dump_path(borgmatic_source_directory, '*_databases/*/*')
)
dump_paths = borgmatic.borg.list.capture_archive_listing(
repository,
archive,
storage,
local_borg_version,
list_path=parent_dump_path,
local_path=local_path,
remote_path=remote_path,
)
# Determine the database names corresponding to the dumps found in the archive and
# add them to archive_database_names.
archive_database_names = {}
for dump_path in dump_paths:
try:
(hook_name, _, database_name) = dump_path.split(
borgmatic_source_directory + os.path.sep, 1
)[1].split(os.path.sep)[0:3]
except (ValueError, IndexError):
logger.warning(
f'{repository}: Ignoring invalid database dump path "{dump_path}" in archive {archive}'
)
else:
if database_name not in archive_database_names.get(hook_name, []):
archive_database_names.setdefault(hook_name, []).extend([database_name])
return archive_database_names
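For instance (hypothetical path), with a borgmatic_source_directory of '.borgmatic', a listed dump path splits into hook name, hostname (ignored), and database name:

# Hypothetical example of the split performed above:
'root/.borgmatic/postgresql_databases/localhost/users'.split(
    '.borgmatic' + os.path.sep, 1
)[1].split(os.path.sep)[0:3]
# -> ['postgresql_databases', 'localhost', 'users']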
def find_databases_to_restore(requested_database_names, archive_database_names):
'''
Given a sequence of requested database names to restore and a dict of hook name to the names of
databases found in an archive, return an expanded sequence of database names to restore,
replacing "all" with actual database names as appropriate.
Raise ValueError if any of the requested database names cannot be found in the archive.
'''
# A map from database hook name to the database names to restore for that hook.
restore_names = (
{UNSPECIFIED_HOOK: requested_database_names}
if requested_database_names
else {UNSPECIFIED_HOOK: ['all']}
)
# If "all" is in restore_names, then replace it with the names of dumps found within the
# archive.
if 'all' in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove('all')
for (hook_name, database_names) in archive_database_names.items():
restore_names.setdefault(hook_name, []).extend(database_names)
# If a database is to be restored as part of "all", then remove it from restore names so
# it doesn't get restored twice.
for database_name in database_names:
if database_name in restore_names[UNSPECIFIED_HOOK]:
restore_names[UNSPECIFIED_HOOK].remove(database_name)
if not restore_names[UNSPECIFIED_HOOK]:
restore_names.pop(UNSPECIFIED_HOOK)
combined_restore_names = set(
name for database_names in restore_names.values() for name in database_names
)
combined_archive_database_names = set(
name for database_names in archive_database_names.values() for name in database_names
)
missing_names = sorted(set(combined_restore_names) - combined_archive_database_names)
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive"
)
return restore_names
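For example (hypothetical database names), requesting "all" expands to whatever dumps the archive actually contains:

find_databases_to_restore(
    requested_database_names=['all'],
    archive_database_names={
        'postgresql_databases': ['users'],
        'mysql_databases': ['orders'],
    },
)
# -> {'postgresql_databases': ['users'], 'mysql_databases': ['orders']}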
def ensure_databases_found(restore_names, remaining_restore_names, found_names):
'''
Given a dict from hook name to database names to restore, a dict from hook name to remaining
database names to restore, and a sequence of found (actually restored) database names, raise
ValueError if requested databases to restore were missing from the archive and/or configuration.
'''
combined_restore_names = set(
name
for database_names in tuple(restore_names.values())
+ tuple(remaining_restore_names.values())
for name in database_names
)
if not combined_restore_names and not found_names:
raise ValueError('No databases were found to restore')
missing_names = sorted(set(combined_restore_names) - set(found_names))
if missing_names:
joined_names = ', '.join(f'"{name}"' for name in missing_names)
raise ValueError(
f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration"
)
def run_restore(
repository,
location,
storage,
hooks,
local_borg_version,
restore_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "restore" action for the given repository, but only if the repository matches the
requested repository in restore arguments.
Raise ValueError if a configured database could not be found to restore.
'''
if restore_arguments.repository and not borgmatic.config.validate.repositories_match(
repository, restore_arguments.repository
):
return
logger.info(
'{}: Restoring databases from archive {}'.format(repository, restore_arguments.archive)
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
archive_name = borgmatic.borg.rlist.resolve_archive_name(
repository, restore_arguments.archive, storage, local_borg_version, local_path, remote_path,
)
archive_database_names = collect_archive_database_names(
repository, archive_name, location, storage, local_borg_version, local_path, remote_path,
)
restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names)
found_names = set()
remaining_restore_names = {}
for hook_name, database_names in restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name
)
if not found_database:
remaining_restore_names.setdefault(found_hook_name or hook_name, []).append(
database_name
)
continue
found_names.add(database_name)
restore_single_database(
repository,
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
found_database,
)
# For any databases that weren't found via exact matches in the hooks configuration, try to
# fall back to "all" entries.
for hook_name, database_names in remaining_restore_names.items():
for database_name in database_names:
found_hook_name, found_database = get_configured_database(
hooks, archive_database_names, hook_name, database_name, 'all'
)
if not found_database:
continue
found_names.add(database_name)
database = copy.copy(found_database)
database['name'] = database_name
restore_single_database(
repository,
location,
storage,
hooks,
local_borg_version,
global_arguments,
local_path,
remote_path,
archive_name,
found_hook_name or hook_name,
database,
)
borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
borgmatic.hooks.dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
ensure_databases_found(restore_names, remaining_restore_names, found_names)

32
borgmatic/actions/rinfo.py Normal file

@@ -0,0 +1,32 @@
import json
import logging
import borgmatic.borg.rinfo
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_rinfo(
repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path,
):
'''
Run the "rinfo" action for the given repository.
If rinfo_arguments.json is True, yield the JSON output from the info for the repository.
'''
if rinfo_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, rinfo_arguments.repository
):
if not rinfo_arguments.json: # pragma: nocover
logger.answer('{}: Displaying repository summary information'.format(repository))
json_output = borgmatic.borg.rinfo.display_repository_info(
repository,
storage,
local_borg_version,
rinfo_arguments=rinfo_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)

32
borgmatic/actions/rlist.py Normal file

@@ -0,0 +1,32 @@
import json
import logging
import borgmatic.borg.rlist
import borgmatic.config.validate
logger = logging.getLogger(__name__)
def run_rlist(
repository, storage, local_borg_version, rlist_arguments, local_path, remote_path,
):
'''
Run the "rlist" action for the given repository.
If rlist_arguments.json is True, yield the JSON output from listing the repository.
'''
if rlist_arguments.repository is None or borgmatic.config.validate.repositories_match(
repository, rlist_arguments.repository
):
if not rlist_arguments.json: # pragma: nocover
logger.answer('{}: Listing repository'.format(repository))
json_output = borgmatic.borg.rlist.list_repository(
repository,
storage,
local_borg_version,
rlist_arguments=rlist_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)

29
borgmatic/actions/transfer.py Normal file

@@ -0,0 +1,29 @@
import logging
import borgmatic.borg.transfer
logger = logging.getLogger(__name__)
def run_transfer(
repository,
storage,
local_borg_version,
transfer_arguments,
global_arguments,
local_path,
remote_path,
):
'''
Run the "transfer" action for the given repository.
'''
logger.info(f'{repository}: Transferring archives to repository')
borgmatic.borg.transfer.transfer_archives(
global_arguments.dry_run,
repository,
storage,
local_borg_version,
transfer_arguments,
local_path=local_path,
remote_path=remote_path,
)

borgmatic/borg/list.py

@@ -85,6 +85,46 @@ def make_find_paths(find_paths):
)
def capture_archive_listing(
repository,
archive,
storage_config,
local_borg_version,
list_path=None,
local_path='borg',
remote_path=None,
):
'''
Given a local or remote repository path, an archive name, a storage config dict, the local Borg
version, the archive path in which to list files, and local and remote Borg paths, capture the
output of listing that archive and return it as a list of file paths.
'''
borg_environment = environment.make_environment(storage_config)
return tuple(
execute_command_and_capture_output(
make_list_command(
repository,
storage_config,
local_borg_version,
argparse.Namespace(
repository=repository,
archive=archive,
paths=[f'sh:{list_path}'],
find_paths=None,
json=None,
format='{path}{NL}',
),
local_path,
remote_path,
),
extra_environment=borg_environment,
)
.strip('\n')
.split('\n')
)
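A hedged sketch of a call (repository, archive, and glob are hypothetical; the restore action above derives the glob from borgmatic_source_directory):

dump_paths = capture_archive_listing(
    '/mnt/backups/repo.borg',
    'host-2023-01-25T23:31:07',
    storage_config={},
    local_borg_version='1.2.3',
    list_path='root/.borgmatic/*_databases/*/*',
)
# -> tuple of matching file paths within the archive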
def list_archive(
repository,
storage_config,

borgmatic/commands/borgmatic.py

@@ -1,5 +1,4 @@
import collections
import copy
import json
import logging
import os
@@ -11,28 +10,28 @@ from subprocess import CalledProcessError
import colorama
import pkg_resources
import borgmatic.actions.borg
import borgmatic.actions.break_lock
import borgmatic.actions.check
import borgmatic.actions.compact
import borgmatic.actions.create
import borgmatic.actions.export_tar
import borgmatic.actions.extract
import borgmatic.actions.info
import borgmatic.actions.list
import borgmatic.actions.mount
import borgmatic.actions.prune
import borgmatic.actions.rcreate
import borgmatic.actions.restore
import borgmatic.actions.rinfo
import borgmatic.actions.rlist
import borgmatic.actions.transfer
import borgmatic.commands.completion
from borgmatic.borg import borg as borg_borg
from borgmatic.borg import break_lock as borg_break_lock
from borgmatic.borg import check as borg_check
from borgmatic.borg import compact as borg_compact
from borgmatic.borg import create as borg_create
from borgmatic.borg import export_tar as borg_export_tar
from borgmatic.borg import extract as borg_extract
from borgmatic.borg import feature as borg_feature
from borgmatic.borg import info as borg_info
from borgmatic.borg import list as borg_list
from borgmatic.borg import mount as borg_mount
from borgmatic.borg import prune as borg_prune
from borgmatic.borg import rcreate as borg_rcreate
from borgmatic.borg import rinfo as borg_rinfo
from borgmatic.borg import rlist as borg_rlist
from borgmatic.borg import transfer as borg_transfer
from borgmatic.borg import umount as borg_umount
from borgmatic.borg import version as borg_version
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
from borgmatic.hooks import command, dispatch, dump, monitor
from borgmatic.hooks import command, dispatch, monitor
from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level
@@ -264,508 +263,153 @@ def run_actions(
)
if 'rcreate' in arguments:
logger.info('{}: Creating repository'.format(repository))
borg_rcreate.create_repository(
global_arguments.dry_run,
borgmatic.actions.rcreate.run_rcreate(
repository,
storage,
local_borg_version,
arguments['rcreate'].encryption_mode,
arguments['rcreate'].source_repository,
arguments['rcreate'].copy_crypt_key,
arguments['rcreate'].append_only,
arguments['rcreate'].storage_quota,
arguments['rcreate'].make_parent_dirs,
local_path=local_path,
remote_path=remote_path,
arguments['rcreate'],
global_arguments,
local_path,
remote_path,
)
if 'transfer' in arguments:
logger.info(f'{repository}: Transferring archives to repository')
borg_transfer.transfer_archives(
global_arguments.dry_run,
borgmatic.actions.transfer.run_transfer(
repository,
storage,
local_borg_version,
transfer_arguments=arguments['transfer'],
local_path=local_path,
remote_path=remote_path,
arguments['transfer'],
global_arguments,
local_path,
remote_path,
)
if 'prune' in arguments:
command.execute_hook(
hooks.get('before_prune'),
hooks.get('umask'),
borgmatic.actions.prune.run_prune(
config_filename,
'pre-prune',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
borg_prune.prune_archives(
global_arguments.dry_run,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
stats=arguments['prune'].stats,
list_archives=arguments['prune'].list_archives,
)
command.execute_hook(
hooks.get('after_prune'),
hooks.get('umask'),
config_filename,
'post-prune',
global_arguments.dry_run,
**hook_context,
arguments['prune'],
global_arguments,
dry_run_label,
local_path,
remote_path,
)
if 'compact' in arguments:
command.execute_hook(
hooks.get('before_compact'),
hooks.get('umask'),
borgmatic.actions.compact.run_compact(
config_filename,
'pre-compact',
global_arguments.dry_run,
)
if borg_feature.available(borg_feature.Feature.COMPACT, local_borg_version):
logger.info('{}: Compacting segments{}'.format(repository, dry_run_label))
borg_compact.compact_segments(
global_arguments.dry_run,
repository,
storage,
retention,
hooks,
hook_context,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['compact'].progress,
cleanup_commits=arguments['compact'].cleanup_commits,
threshold=arguments['compact'].threshold,
)
else: # pragma: nocover
logger.info(
'{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)
)
command.execute_hook(
hooks.get('after_compact'),
hooks.get('umask'),
config_filename,
'post-compact',
global_arguments.dry_run,
arguments['compact'],
global_arguments,
dry_run_label,
local_path,
remote_path,
)
if 'create' in arguments:
command.execute_hook(
hooks.get('before_backup'),
hooks.get('umask'),
yield from borgmatic.actions.create.run_create(
config_filename,
'pre-backup',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
active_dumps = dispatch.call_hooks(
'dump_databases',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
stream_processes = [process for processes in active_dumps.values() for process in processes]
json_output = borg_create.create_archive(
global_arguments.dry_run,
repository,
location,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['create'].progress,
stats=arguments['create'].stats,
json=arguments['create'].json,
list_files=arguments['create'].list_files,
stream_processes=stream_processes,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
config_filename,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
hook_context,
local_borg_version,
arguments['create'],
global_arguments,
dry_run_label,
local_path,
remote_path,
)
command.execute_hook(
hooks.get('after_backup'),
hooks.get('umask'),
config_filename,
'post-backup',
global_arguments.dry_run,
**hook_context,
)
if 'check' in arguments and checks.repository_enabled_for_checks(repository, consistency):
command.execute_hook(
hooks.get('before_check'),
hooks.get('umask'),
borgmatic.actions.check.run_check(
config_filename,
'pre-check',
global_arguments.dry_run,
**hook_context,
)
logger.info('{}: Running consistency checks'.format(repository))
borg_check.check_archives(
repository,
location,
storage,
consistency,
hooks,
hook_context,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
progress=arguments['check'].progress,
repair=arguments['check'].repair,
only_checks=arguments['check'].only,
force=arguments['check'].force,
)
command.execute_hook(
hooks.get('after_check'),
hooks.get('umask'),
config_filename,
'post-check',
global_arguments.dry_run,
**hook_context,
arguments['check'],
global_arguments,
local_path,
remote_path,
)
if 'extract' in arguments:
command.execute_hook(
hooks.get('before_extract'),
hooks.get('umask'),
borgmatic.actions.extract.run_extract(
config_filename,
'pre-extract',
global_arguments.dry_run,
**hook_context,
)
if arguments['extract'].repository is None or validate.repositories_match(
repository, arguments['extract'].repository
):
logger.info(
'{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
)
borg_extract.extract_archive(
global_arguments.dry_run,
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['extract'].archive,
storage,
local_borg_version,
local_path,
remote_path,
),
arguments['extract'].paths,
location,
storage,
hooks,
hook_context,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path=arguments['extract'].destination,
strip_components=arguments['extract'].strip_components,
progress=arguments['extract'].progress,
)
command.execute_hook(
hooks.get('after_extract'),
hooks.get('umask'),
config_filename,
'post-extract',
global_arguments.dry_run,
**hook_context,
arguments['extract'],
global_arguments,
local_path,
remote_path,
)
if 'export-tar' in arguments:
if arguments['export-tar'].repository is None or validate.repositories_match(
repository, arguments['export-tar'].repository
):
logger.info(
'{}: Exporting archive {} as tar file'.format(
repository, arguments['export-tar'].archive
)
)
borg_export_tar.export_tar_archive(
global_arguments.dry_run,
borgmatic.actions.export_tar.run_export_tar(
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['export-tar'].archive,
storage,
local_borg_version,
arguments['export-tar'],
global_arguments,
local_path,
remote_path,
),
arguments['export-tar'].paths,
arguments['export-tar'].destination,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
tar_filter=arguments['export-tar'].tar_filter,
list_files=arguments['export-tar'].list_files,
strip_components=arguments['export-tar'].strip_components,
)
if 'mount' in arguments:
if arguments['mount'].repository is None or validate.repositories_match(
repository, arguments['mount'].repository
):
if arguments['mount'].archive:
logger.info(
'{}: Mounting archive {}'.format(repository, arguments['mount'].archive)
borgmatic.actions.mount.run_mount(
repository, storage, local_borg_version, arguments['mount'], local_path, remote_path,
)
else: # pragma: nocover
logger.info('{}: Mounting repository'.format(repository))
borg_mount.mount_archive(
if 'restore' in arguments:
borgmatic.actions.restore.run_restore(
repository,
borg_rlist.resolve_archive_name(
repository,
arguments['mount'].archive,
location,
storage,
hooks,
local_borg_version,
arguments['restore'],
global_arguments,
local_path,
remote_path,
),
arguments['mount'].mount_point,
arguments['mount'].paths,
arguments['mount'].foreground,
arguments['mount'].options,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)
if 'restore' in arguments: # pragma: nocover
if arguments['restore'].repository is None or validate.repositories_match(
repository, arguments['restore'].repository
):
logger.info(
'{}: Restoring databases from archive {}'.format(
repository, arguments['restore'].archive
)
)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
restore_names = arguments['restore'].databases or []
if 'all' in restore_names:
restore_names = []
archive_name = borg_rlist.resolve_archive_name(
repository,
arguments['restore'].archive,
storage,
local_borg_version,
local_path,
remote_path,
)
found_names = set()
for hook_name, per_hook_restore_databases in hooks.items():
if hook_name not in dump.DATABASE_HOOK_NAMES:
continue
for restore_database in per_hook_restore_databases:
database_name = restore_database['name']
if restore_names and database_name not in restore_names:
continue
found_names.add(database_name)
dump_pattern = dispatch.call_hooks(
'make_database_dump_pattern',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
database_name,
)[hook_name]
# Kick off a single database extract to stdout.
extract_process = borg_extract.extract_archive(
dry_run=global_arguments.dry_run,
repository=repository,
archive=archive_name,
paths=dump.convert_glob_patterns_to_borg_patterns([dump_pattern]),
location_config=location,
storage_config=storage,
local_borg_version=local_borg_version,
local_path=local_path,
remote_path=remote_path,
destination_path='/',
# A directory format dump isn't a single file, and therefore can't extract
# to stdout. In this case, the extract_process return value is None.
extract_to_stdout=bool(restore_database.get('format') != 'directory'),
)
# Run a single database restore, consuming the extract stdout (if any).
dispatch.call_hooks(
'restore_database_dump',
{hook_name: [restore_database]},
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
extract_process,
)
dispatch.call_hooks_even_if_unconfigured(
'remove_database_dumps',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
location,
global_arguments.dry_run,
)
if not restore_names and not found_names:
raise ValueError('No databases were found to restore')
missing_names = sorted(set(restore_names) - found_names)
if missing_names:
raise ValueError(
'Cannot restore database(s) {} missing from borgmatic\'s configuration'.format(
', '.join(missing_names)
)
)
if 'rlist' in arguments:
if arguments['rlist'].repository is None or validate.repositories_match(
repository, arguments['rlist'].repository
):
rlist_arguments = copy.copy(arguments['rlist'])
if not rlist_arguments.json: # pragma: nocover
logger.answer('{}: Listing repository'.format(repository))
json_output = borg_rlist.list_repository(
repository,
storage,
local_borg_version,
rlist_arguments=rlist_arguments,
local_path=local_path,
remote_path=remote_path,
yield from borgmatic.actions.rlist.run_rlist(
repository, storage, local_borg_version, arguments['rlist'], local_path, remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
if 'list' in arguments:
if arguments['list'].repository is None or validate.repositories_match(
repository, arguments['list'].repository
):
list_arguments = copy.copy(arguments['list'])
if not list_arguments.json: # pragma: nocover
if list_arguments.find_paths:
logger.answer('{}: Searching archives'.format(repository))
elif not list_arguments.archive:
logger.answer('{}: Listing archives'.format(repository))
list_arguments.archive = borg_rlist.resolve_archive_name(
repository,
list_arguments.archive,
storage,
local_borg_version,
local_path,
remote_path,
yield from borgmatic.actions.list.run_list(
repository, storage, local_borg_version, arguments['list'], local_path, remote_path,
)
json_output = borg_list.list_archive(
repository,
storage,
local_borg_version,
list_arguments=list_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
if 'rinfo' in arguments:
if arguments['rinfo'].repository is None or validate.repositories_match(
repository, arguments['rinfo'].repository
):
rinfo_arguments = copy.copy(arguments['rinfo'])
if not rinfo_arguments.json: # pragma: nocover
logger.answer('{}: Displaying repository summary information'.format(repository))
json_output = borg_rinfo.display_repository_info(
repository,
storage,
local_borg_version,
rinfo_arguments=rinfo_arguments,
local_path=local_path,
remote_path=remote_path,
yield from borgmatic.actions.rinfo.run_rinfo(
repository, storage, local_borg_version, arguments['rinfo'], local_path, remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
if 'info' in arguments:
if arguments['info'].repository is None or validate.repositories_match(
repository, arguments['info'].repository
):
info_arguments = copy.copy(arguments['info'])
if not info_arguments.json: # pragma: nocover
logger.answer('{}: Displaying archive summary information'.format(repository))
info_arguments.archive = borg_rlist.resolve_archive_name(
yield from borgmatic.actions.info.run_info(
repository, storage, local_borg_version, arguments['info'], local_path, remote_path,
)
if 'break-lock' in arguments:
borgmatic.actions.break_lock.run_break_lock(
repository,
info_arguments.archive,
storage,
local_borg_version,
arguments['break-lock'],
local_path,
remote_path,
)
json_output = borg_info.display_archives_info(
repository,
storage,
local_borg_version,
info_arguments=info_arguments,
local_path=local_path,
remote_path=remote_path,
)
if json_output: # pragma: nocover
yield json.loads(json_output)
if 'break-lock' in arguments:
if arguments['break-lock'].repository is None or validate.repositories_match(
repository, arguments['break-lock'].repository
):
logger.info(f'{repository}: Breaking repository and cache locks')
borg_break_lock.break_lock(
repository,
storage,
local_borg_version,
local_path=local_path,
remote_path=remote_path,
)
if 'borg' in arguments:
if arguments['borg'].repository is None or validate.repositories_match(
repository, arguments['borg'].repository
):
logger.info('{}: Running arbitrary Borg command'.format(repository))
archive_name = borg_rlist.resolve_archive_name(
repository,
arguments['borg'].archive,
storage,
local_borg_version,
local_path,
remote_path,
)
borg_borg.run_arbitrary_borg(
repository,
storage,
local_borg_version,
options=arguments['borg'].options,
archive=archive_name,
local_path=local_path,
remote_path=remote_path,
borgmatic.actions.borg.run_borg(
repository, storage, local_borg_version, arguments['borg'], local_path, remote_path,
)
command.execute_hook(

borgmatic/config/schema.yaml

@@ -855,6 +855,19 @@ properties:
configured to trust the configured username
without a password.
example: trustsome1
format:
type: string
enum: ['sql']
description: |
Database dump output format. Currently only "sql"
is supported. Defaults to "sql" for a single
database. Or, when database name is "all" and
format is blank, dumps all databases to a single
file. But if a format is specified with an "all"
database name, dumps each database to a separate
file of that format, allowing more convenient
restores of individual databases.
example: sql
list_options:
type: string
description: |

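The semantics described above, as a configuration sketch (hedged; values are placeholders):

hooks:
    mysql_databases:
        # "all" plus a format: a separate "sql" dump file per database found
        # on the host. Omitting "format" would instead produce one combined
        # dump file for all databases.
        - name: all
          format: sql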
borgmatic/execute.py

@@ -49,7 +49,8 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
'''
Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each
process with the requested log level. Additionally, raise a CalledProcessError if a process
exits with an error (or a warning for exit code 1, if that process matches the Borg local path).
exits with an error (or a warning for exit code 1, if that process does not match the Borg local
path).
If output log level is None, then instead of logging, capture output for each process and return
it as a dict from the process to its output.

borgmatic/hooks/mysql.py

@@ -1,4 +1,6 @@
import copy
import logging
import os
from borgmatic.execute import (
execute_command,
@@ -28,10 +30,8 @@ def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label
In the case of "all", query for the names of databases on the configured host and return them,
excluding any system databases that will cause problems during restore.
'''
requested_name = database['name']
if requested_name != 'all':
return (requested_name,)
if database['name'] != 'all':
return (database['name'],)
show_command = (
('mysql',)
@@ -57,6 +57,55 @@
)
def execute_dump_command(
database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label
):
'''
Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a
named pipe constructed from the given dump path and database names. Use the given log prefix in
any log entries.
Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if
this is a dry run, then don't actually dump anything and return None.
'''
database_name = database['name']
dump_filename = dump.make_database_dump_filename(
dump_path, database['name'], database.get('hostname')
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}'
)
return None
dump_command = (
('mysqldump',)
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ ('--add-drop-database',)
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ (('--user', database['username']) if 'username' in database else ())
+ ('--databases',)
+ database_names
# Use shell redirection rather than execute_command(output_file=open(...)) to prevent
# the open() call on a named pipe from hanging the main borgmatic process.
+ ('>', dump_filename)
)
logger.debug(
f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
return None
dump.create_named_pipe_for_dump(dump_filename)
return execute_command(
dump_command, shell=True, extra_environment=extra_environment, run_to_completion=False,
)
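A sketch of the calling contract (hypothetical values): the function returns a subprocess.Popen writing to the named pipe, or None for dry runs and duplicate dumps, which dump_databases() below filters out:

process = execute_dump_command(
    database={'name': 'users'},  # hypothetical configured database
    log_prefix='test.yaml',
    dump_path='~/.borgmatic/mysql_databases',  # hypothetical dump path
    database_names=('users',),
    extra_environment={'MYSQL_PWD': 'secret'},
    dry_run=False,
    dry_run_label='',
)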
def dump_databases(databases, log_prefix, location_config, dry_run):
'''
Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence
@@ -73,10 +122,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))
for database in databases:
requested_name = database['name']
dump_filename = dump.make_database_dump_filename(
make_dump_path(location_config), requested_name, database.get('hostname')
)
dump_path = make_dump_path(location_config)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
dump_database_names = database_names_to_dump(
database, extra_environment, log_prefix, dry_run_label
@@ -84,41 +130,35 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
if not dump_database_names:
raise ValueError('Cannot find any MySQL databases to dump.')
dump_command = (
('mysqldump',)
+ (tuple(database['options'].split(' ')) if 'options' in database else ())
+ ('--add-drop-database',)
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ (('--user', database['username']) if 'username' in database else ())
+ ('--databases',)
+ dump_database_names
# Use shell redirection rather than execute_command(output_file=open(...)) to prevent
# the open() call on a named pipe from hanging the main borgmatic process.
+ ('>', dump_filename)
)
logger.debug(
'{}: Dumping MySQL database {} to {}{}'.format(
log_prefix, requested_name, dump_filename, dry_run_label
)
)
if dry_run:
continue
dump.create_named_pipe_for_dump(dump_filename)
if database['name'] == 'all' and database.get('format'):
for dump_name in dump_database_names:
renamed_database = copy.copy(database)
renamed_database['name'] = dump_name
processes.append(
execute_command(
dump_command,
shell=True,
extra_environment=extra_environment,
run_to_completion=False,
execute_dump_command(
renamed_database,
log_prefix,
dump_path,
(dump_name,),
extra_environment,
dry_run,
dry_run_label,
)
)
else:
processes.append(
execute_dump_command(
database,
log_prefix,
dump_path,
dump_database_names,
extra_environment,
dry_run,
dry_run_label,
)
)
return processes
return [process for process in processes if process]
def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover

borgmatic/hooks/postgresql.py

@@ -1,5 +1,6 @@
import csv
import logging
import os
from borgmatic.execute import (
execute_command,
@@ -111,6 +112,11 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
dump_filename = dump.make_database_dump_filename(
dump_path, database_name, database.get('hostname')
)
if os.path.exists(dump_filename):
logger.warning(
f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}'
)
continue
command = (
(dump_command, '--no-password', '--clean', '--if-exists',)
@@ -128,9 +134,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
)
logger.debug(
'{}: Dumping PostgreSQL database "{}" to {}{}'.format(
log_prefix, database_name, dump_filename, dry_run_label
)
f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}'
)
if dry_run:
continue

tests/end-to-end/test_database.py

@@ -42,6 +42,11 @@ hooks:
hostname: postgresql
username: postgres
password: test
- name: all
format: custom
hostname: postgresql
username: postgres
password: test
mysql_databases:
- name: test
hostname: mysql
@@ -51,6 +56,11 @@ hooks:
hostname: mysql
username: root
password: test
- name: all
format: sql
hostname: mysql
username: root
password: test
mongodb_databases:
- name: test
hostname: mongodb

0
tests/unit/actions/__init__.py Normal file

22
tests/unit/actions/test_borg.py Normal file

@@ -0,0 +1,22 @@
from flexmock import flexmock
from borgmatic.actions import borg as module
def test_run_borg_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module.borgmatic.borg.borg).should_receive('run_arbitrary_borg')
borg_arguments = flexmock(repository=flexmock(), archive=flexmock(), options=flexmock())
module.run_borg(
repository='repo',
storage={},
local_borg_version=None,
borg_arguments=borg_arguments,
local_path=None,
remote_path=None,
)

19
tests/unit/actions/test_break_lock.py Normal file

@@ -0,0 +1,19 @@
from flexmock import flexmock
from borgmatic.actions import break_lock as module
def test_run_break_lock_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.break_lock).should_receive('break_lock')
break_lock_arguments = flexmock(repository=flexmock())
module.run_break_lock(
repository='repo',
storage={},
local_borg_version=None,
break_lock_arguments=break_lock_arguments,
local_path=None,
remote_path=None,
)

31
tests/unit/actions/test_check.py Normal file

@@ -0,0 +1,31 @@
from flexmock import flexmock
from borgmatic.actions import check as module
def test_run_check_calls_hooks():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.checks).should_receive(
'repository_enabled_for_checks'
).and_return(True)
flexmock(module.borgmatic.borg.check).should_receive('check_archives')
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
check_arguments = flexmock(
progress=flexmock(), repair=flexmock(), only=flexmock(), force=flexmock()
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_check(
config_filename='test.yaml',
repository='repo',
location={'repositories': ['repo']},
storage={},
consistency={},
hooks={},
hook_context={},
local_borg_version=None,
check_arguments=check_arguments,
global_arguments=global_arguments,
local_path=None,
remote_path=None,
)

29
tests/unit/actions/test_compact.py Normal file

@@ -0,0 +1,29 @@
from flexmock import flexmock
from borgmatic.actions import compact as module
def test_compact_actions_calls_hooks():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.borg.feature).should_receive('available').and_return(True)
flexmock(module.borgmatic.borg.compact).should_receive('compact_segments')
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
compact_arguments = flexmock(
progress=flexmock(), cleanup_commits=flexmock(), threshold=flexmock()
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_compact(
config_filename='test.yaml',
repository='repo',
storage={},
retention={},
hooks={},
hook_context={},
local_borg_version=None,
compact_arguments=compact_arguments,
global_arguments=global_arguments,
dry_run_label='',
local_path=None,
remote_path=None,
)

34
tests/unit/actions/test_create.py Normal file

@@ -0,0 +1,34 @@
from flexmock import flexmock
from borgmatic.actions import create as module
def test_run_create_executes_and_calls_hooks():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.borg.create).should_receive('create_archive')
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({})
flexmock(module.borgmatic.hooks.dispatch).should_receive(
'call_hooks_even_if_unconfigured'
).and_return({})
create_arguments = flexmock(
progress=flexmock(), stats=flexmock(), json=flexmock(), list_files=flexmock()
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
list(
module.run_create(
config_filename='test.yaml',
repository='repo',
location={},
storage={},
hooks={},
hook_context={},
local_borg_version=None,
create_arguments=create_arguments,
global_arguments=global_arguments,
dry_run_label='',
local_path=None,
remote_path=None,
)
)

29
tests/unit/actions/test_export_tar.py Normal file

@@ -0,0 +1,29 @@
from flexmock import flexmock
from borgmatic.actions import export_tar as module
def test_run_export_tar_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.export_tar).should_receive('export_tar_archive')
export_tar_arguments = flexmock(
repository=flexmock(),
archive=flexmock(),
paths=flexmock(),
destination=flexmock(),
tar_filter=flexmock(),
list_files=flexmock(),
strip_components=flexmock(),
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_export_tar(
repository='repo',
storage={},
local_borg_version=None,
export_tar_arguments=export_tar_arguments,
global_arguments=global_arguments,
local_path=None,
remote_path=None,
)

33
tests/unit/actions/test_extract.py Normal file

@@ -0,0 +1,33 @@
from flexmock import flexmock
from borgmatic.actions import extract as module
def test_run_extract_calls_hooks():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.extract).should_receive('extract_archive')
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
extract_arguments = flexmock(
paths=flexmock(),
progress=flexmock(),
destination=flexmock(),
strip_components=flexmock(),
archive=flexmock(),
repository='repo',
)
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_extract(
config_filename='test.yaml',
repository='repo',
location={'repositories': ['repo']},
storage={},
hooks={},
hook_context={},
local_borg_version=None,
extract_arguments=extract_arguments,
global_arguments=global_arguments,
local_path=None,
remote_path=None,
)

View file

@@ -0,0 +1,24 @@
from flexmock import flexmock
from borgmatic.actions import info as module
def test_run_info_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module.borgmatic.borg.info).should_receive('display_archives_info')
info_arguments = flexmock(repository=flexmock(), archive=flexmock(), json=flexmock())
list(
module.run_info(
repository='repo',
storage={},
local_borg_version=None,
info_arguments=info_arguments,
local_path=None,
remote_path=None,
)
)

View file

@@ -0,0 +1,24 @@
from flexmock import flexmock
from borgmatic.actions import list as module
def test_run_list_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module.borgmatic.borg.list).should_receive('list_archive')
list_arguments = flexmock(repository=flexmock(), archive=flexmock(), json=flexmock())
list(
module.run_list(
repository='repo',
storage={},
local_borg_version=None,
list_arguments=list_arguments,
local_path=None,
remote_path=None,
)
)

View file

@@ -0,0 +1,26 @@
from flexmock import flexmock
from borgmatic.actions import mount as module
def test_run_mount_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.mount).should_receive('mount_archive')
mount_arguments = flexmock(
repository=flexmock(),
archive=flexmock(),
mount_point=flexmock(),
paths=flexmock(),
foreground=flexmock(),
options=flexmock(),
)
module.run_mount(
repository='repo',
storage={},
local_borg_version=None,
mount_arguments=mount_arguments,
local_path=None,
remote_path=None,
)

View file

@@ -0,0 +1,26 @@
from flexmock import flexmock
from borgmatic.actions import prune as module
def test_run_prune_calls_hooks():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.borg.prune).should_receive('prune_archives')
flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2)
prune_arguments = flexmock(stats=flexmock(), list_archives=flexmock())
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_prune(
config_filename='test.yaml',
repository='repo',
storage={},
retention={},
hooks={},
hook_context={},
local_borg_version=None,
prune_arguments=prune_arguments,
global_arguments=global_arguments,
dry_run_label='',
local_path=None,
remote_path=None,
)

View file

@@ -0,0 +1,26 @@
from flexmock import flexmock
from borgmatic.actions import rcreate as module
def test_run_rcreate_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.borg.rcreate).should_receive('create_repository')
arguments = flexmock(
encryption_mode=flexmock(),
source_repository=flexmock(),
copy_crypt_key=flexmock(),
append_only=flexmock(),
storage_quota=flexmock(),
make_parent_dirs=flexmock(),
)
module.run_rcreate(
repository='repo',
storage={},
local_borg_version=None,
rcreate_arguments=arguments,
global_arguments=flexmock(dry_run=False),
local_path=None,
remote_path=None,
)

View file

@@ -0,0 +1,495 @@
import pytest
from flexmock import flexmock
import borgmatic.actions.restore as module
def test_get_configured_database_matches_database_by_name():
assert module.get_configured_database(
hooks={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']},
hook_name='postgresql_databases',
database_name='bar',
) == ('postgresql_databases', {'name': 'bar'})
def test_get_configured_database_matches_nothing_when_database_name_not_configured():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='quux',
) == (None, None)
def test_get_configured_database_matches_nothing_when_database_name_not_in_archive():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['bar']},
hook_name='postgresql_databases',
database_name='foo',
) == (None, None)
def test_get_configured_database_matches_database_by_configuration_database_name():
assert module.get_configured_database(
hooks={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]},
archive_database_names={'postgresql_databases': ['foo']},
hook_name='postgresql_databases',
database_name='foo',
configuration_database_name='all',
) == ('postgresql_databases', {'name': 'all'})
def test_get_configured_database_with_unspecified_hook_matches_database_by_name():
assert module.get_configured_database(
hooks={
'other_databases': [{'name': 'other'}],
'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}],
},
archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']},
hook_name=module.UNSPECIFIED_HOOK,
database_name='bar',
) == ('postgresql_databases', {'name': 'bar'})
def test_collect_archive_database_names_parses_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
[
'.borgmatic/postgresql_databases/localhost/foo',
'.borgmatic/postgresql_databases/localhost/bar',
'.borgmatic/mysql_databases/localhost/quux',
]
)
archive_database_names = module.collect_archive_database_names(
repository='repo',
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
)
assert archive_database_names == {
'postgresql_databases': ['foo', 'bar'],
'mysql_databases': ['quux'],
}
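# A minimal sketch of the path convention the tests above assume (this parser
# is hypothetical, standing in for collect_archive_database_names): dump paths
# look like "<source_dir>/<hook_name>/<hostname>/<database>[/...]", so the hook
# and database names fall out of the second and fourth path components, with
# malformed paths skipped and directory-format entries de-duplicated.
def parse_archive_database_names(archive_paths):
    names = {}
    for path in archive_paths:
        parts = path.split('/')
        if len(parts) < 4:  # e.g. ".borgmatic/invalid" or "invalid/as/well"
            continue
        hook_name, database = parts[1], parts[3]
        if database not in names.setdefault(hook_name, []):
            names[hook_name].append(database)
    return names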
def test_collect_archive_database_names_parses_directory_format_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
[
'.borgmatic/postgresql_databases/localhost/foo/table1',
'.borgmatic/postgresql_databases/localhost/foo/table2',
]
)
archive_database_names = module.collect_archive_database_names(
repository='repo',
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
)
assert archive_database_names == {
'postgresql_databases': ['foo'],
}
def test_collect_archive_database_names_skips_bad_archive_paths():
flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('')
flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return(
['.borgmatic/postgresql_databases/localhost/foo', '.borgmatic/invalid', 'invalid/as/well']
)
archive_database_names = module.collect_archive_database_names(
repository='repo',
archive='archive',
location={'borgmatic_source_directory': '.borgmatic'},
storage=flexmock(),
local_borg_version=flexmock(),
local_path=flexmock(),
remote_path=flexmock(),
)
assert archive_database_names == {
'postgresql_databases': ['foo'],
}
def test_find_databases_to_restore_passes_through_requested_names_found_in_archive():
restore_names = module.find_databases_to_restore(
requested_database_names=['foo', 'bar'],
        archive_database_names={'postgresql_databases': ['foo', 'bar', 'baz']},
)
assert restore_names == {module.UNSPECIFIED_HOOK: ['foo', 'bar']}
def test_find_databases_to_restore_raises_for_requested_names_missing_from_archive():
with pytest.raises(ValueError):
module.find_databases_to_restore(
requested_database_names=['foo', 'bar'],
            archive_database_names={'postgresql_databases': ['foo']},
)
def test_find_databases_to_restore_without_requested_names_finds_all_archive_databases():
    archive_database_names = {'postgresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=[], archive_database_names=archive_database_names,
)
assert restore_names == archive_database_names
def test_find_databases_to_restore_with_all_in_requested_names_finds_all_archive_databases():
    archive_database_names = {'postgresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=['all'], archive_database_names=archive_database_names,
)
assert restore_names == archive_database_names
def test_find_databases_to_restore_with_all_in_requested_names_plus_additional_requested_names_omits_duplicates():
    archive_database_names = {'postgresql_databases': ['foo', 'bar']}
restore_names = module.find_databases_to_restore(
requested_database_names=['all', 'foo', 'bar'],
archive_database_names=archive_database_names,
)
assert restore_names == archive_database_names
def test_find_databases_to_restore_raises_for_all_in_requested_names_and_requested_names_missing_from_archives():
with pytest.raises(ValueError):
module.find_databases_to_restore(
requested_database_names=['all', 'foo', 'bar'],
            archive_database_names={'postgresql_databases': ['foo']},
)
def test_ensure_databases_found_with_all_databases_found_does_not_raise():
module.ensure_databases_found(
restore_names={'postgresql_databases': ['foo']},
remaining_restore_names={'postgresql_databases': ['bar']},
found_names=['foo', 'bar'],
)
def test_ensure_databases_found_with_no_databases_raises():
with pytest.raises(ValueError):
module.ensure_databases_found(
restore_names={'postgresql_databases': []}, remaining_restore_names={}, found_names=[],
)
def test_ensure_databases_found_with_missing_databases_raises():
with pytest.raises(ValueError):
module.ensure_databases_found(
restore_names={'postgresql_databases': ['foo']},
remaining_restore_names={'postgresql_databases': ['bar']},
found_names=['foo'],
)
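# A rough sketch of the selection rules the tests above encode (assumed
# behavior, not borgmatic's verbatim implementation): every specifically
# requested name must exist somewhere in the archive, and an empty request or
# an "all" entry selects everything the archive contains.
UNSPECIFIED = None  # stand-in for module.UNSPECIFIED_HOOK in this sketch

def find_databases_to_restore_sketch(requested_names, archive_names):
    found = {name for names in archive_names.values() for name in names}
    missing = set(requested_names) - {'all'} - found
    if missing:
        raise ValueError(f'Cannot restore databases missing from archive: {missing}')
    if not requested_names or 'all' in requested_names:
        return archive_names  # everything in the archive, keyed by hook name
    return {UNSPECIFIED: list(requested_names)}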
def test_run_restore_restores_each_database():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured')
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').and_return(
('postgresql_databases', {'name': 'foo'})
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo'},
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar'},
).once()
flexmock(module).should_receive('ensure_databases_found')
module.run_restore(
repository='repo',
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)
def test_run_restore_bails_for_non_matching_repository():
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(
False
)
flexmock(module.borgmatic.hooks.dispatch).should_receive(
'call_hooks_even_if_unconfigured'
).never()
flexmock(module).should_receive('restore_single_database').never()
module.run_restore(
repository='repo',
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)
def test_run_restore_restores_database_configured_with_all_name():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured')
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
configuration_database_name='all',
).and_return(('postgresql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo'},
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar'},
).once()
flexmock(module).should_receive('ensure_databases_found')
module.run_restore(
repository='repo',
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)
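# Note the two-step lookup encoded above (an inference from these
# expectations): get_configured_database() is first called with the database's
# own name, and only when that returns (None, None) is it retried with
# configuration_database_name='all', letting one configured "all" entry cover
# any database found in the archive.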
def test_run_restore_skips_missing_database():
restore_names = {
'postgresql_databases': ['foo', 'bar'],
}
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured')
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
).and_return((None, None))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='bar',
configuration_database_name='all',
).and_return((None, None))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo'},
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'bar'},
).never()
flexmock(module).should_receive('ensure_databases_found')
module.run_restore(
repository='repo',
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)
def test_run_restore_restores_databases_from_different_hooks():
restore_names = {
'postgresql_databases': ['foo'],
'mysql_databases': ['bar'],
}
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured')
flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return(
flexmock()
)
flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock())
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names)
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='postgresql_databases',
database_name='foo',
).and_return(('postgresql_databases', {'name': 'foo'}))
flexmock(module).should_receive('get_configured_database').with_args(
hooks=object,
archive_database_names=object,
hook_name='mysql_databases',
database_name='bar',
).and_return(('mysql_databases', {'name': 'bar'}))
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='postgresql_databases',
database={'name': 'foo'},
).once()
flexmock(module).should_receive('restore_single_database').with_args(
repository=object,
location=object,
storage=object,
hooks=object,
local_borg_version=object,
global_arguments=object,
local_path=object,
remote_path=object,
archive_name=object,
hook_name='mysql_databases',
database={'name': 'bar'},
).once()
flexmock(module).should_receive('ensure_databases_found')
module.run_restore(
repository='repo',
location=flexmock(),
storage=flexmock(),
hooks=flexmock(),
local_borg_version=flexmock(),
restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()),
global_arguments=flexmock(dry_run=False),
local_path=flexmock(),
remote_path=flexmock(),
)

View file

@@ -0,0 +1,21 @@
from flexmock import flexmock
from borgmatic.actions import rinfo as module
def test_run_rinfo_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rinfo).should_receive('display_repository_info')
rinfo_arguments = flexmock(repository=flexmock(), json=flexmock())
list(
module.run_rinfo(
repository='repo',
storage={},
local_borg_version=None,
rinfo_arguments=rinfo_arguments,
local_path=None,
remote_path=None,
)
)

View file

@@ -0,0 +1,21 @@
from flexmock import flexmock
from borgmatic.actions import rlist as module
def test_run_rlist_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borgmatic.borg.rlist).should_receive('list_repository')
rlist_arguments = flexmock(repository=flexmock(), json=flexmock())
list(
module.run_rlist(
repository='repo',
storage={},
local_borg_version=None,
rlist_arguments=rlist_arguments,
local_path=None,
remote_path=None,
)
)

View file

@@ -0,0 +1,20 @@
from flexmock import flexmock
from borgmatic.actions import transfer as module
def test_run_transfer_does_not_raise():
flexmock(module.logger).answer = lambda message: None
flexmock(module.borgmatic.borg.transfer).should_receive('transfer_archives')
transfer_arguments = flexmock()
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False)
module.run_transfer(
repository='repo',
storage={},
local_borg_version=None,
transfer_arguments=transfer_arguments,
global_arguments=global_arguments,
local_path=None,
remote_path=None,
)

View file

@@ -253,6 +253,19 @@ def test_make_find_paths_adds_globs_to_path_fragments():
assert module.make_find_paths(('foo.txt',)) == ('sh:**/*foo.txt*/**',)
def test_capture_archive_listing_does_not_raise():
flexmock(module.environment).should_receive('make_environment')
flexmock(module).should_receive('execute_command_and_capture_output').and_return('')
flexmock(module).should_receive('make_list_command')
module.capture_archive_listing(
repository='repo',
archive='archive',
storage_config=flexmock(),
local_borg_version=flexmock(),
)
def test_list_archive_calls_borg_with_parameters():
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER

View file

@@ -357,455 +357,387 @@ def test_run_configuration_retries_timeout_multiple_repos():
assert results == error_logs
def test_run_actions_does_not_raise_for_rcreate_action():
def test_run_actions_runs_rcreate():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.borg_rcreate).should_receive('create_repository')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'rcreate': flexmock(
encryption_mode=flexmock(),
source_repository=flexmock(),
copy_crypt_key=flexmock(),
append_only=flexmock(),
storage_quota=flexmock(),
make_parent_dirs=flexmock(),
),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.rcreate).should_receive('run_rcreate').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'rcreate': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_transfer_action():
def test_run_actions_runs_transfer():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.borg_transfer).should_receive('transfer_archives')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'transfer': flexmock(),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.transfer).should_receive('run_transfer').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'transfer': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_calls_hooks_for_prune_action():
def test_run_actions_runs_prune():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.borg_prune).should_receive('prune_archives')
flexmock(module.command).should_receive('execute_hook').times(
4
) # Before/after extract and before/after actions.
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'prune': flexmock(stats=flexmock(), list_archives=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.prune).should_receive('run_prune').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'prune': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_calls_hooks_for_compact_action():
def test_run_actions_runs_compact():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.borg_feature).should_receive('available').and_return(True)
flexmock(module.borg_compact).should_receive('compact_segments')
flexmock(module.command).should_receive('execute_hook').times(
4
) # Before/after extract and before/after actions.
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'compact': flexmock(progress=flexmock(), cleanup_commits=flexmock(), threshold=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.compact).should_receive('run_compact').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'compact': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_executes_and_calls_hooks_for_create_action():
def test_run_actions_runs_create():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.borg_create).should_receive('create_archive')
flexmock(module.command).should_receive('execute_hook').times(
4
) # Before/after extract and before/after actions.
flexmock(module.dispatch).should_receive('call_hooks').and_return({})
flexmock(module.dispatch).should_receive('call_hooks_even_if_unconfigured').and_return({})
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'create': flexmock(
progress=flexmock(), stats=flexmock(), json=flexmock(), list_files=flexmock()
),
}
flexmock(module.command).should_receive('execute_hook')
expected = flexmock()
flexmock(borgmatic.actions.create).should_receive('run_create').and_yield(expected).once()
list(
result = tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'create': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
assert result == (expected,)
def test_run_actions_calls_hooks_for_check_action():
def test_run_actions_runs_check_when_repository_enabled_for_checks():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.command).should_receive('execute_hook')
flexmock(module.checks).should_receive('repository_enabled_for_checks').and_return(True)
flexmock(module.borg_check).should_receive('check_archives')
flexmock(module.command).should_receive('execute_hook').times(
4
) # Before/after extract and before/after actions.
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'check': flexmock(
progress=flexmock(), repair=flexmock(), only=flexmock(), force=flexmock()
),
}
flexmock(borgmatic.actions.check).should_receive('run_check').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'check': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_calls_hooks_for_extract_action():
def test_run_actions_skips_check_when_repository_not_enabled_for_checks():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_extract).should_receive('extract_archive')
flexmock(module.command).should_receive('execute_hook').times(
4
) # Before/after extract and before/after actions.
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'extract': flexmock(
paths=flexmock(),
progress=flexmock(),
destination=flexmock(),
strip_components=flexmock(),
archive=flexmock(),
repository='repo',
),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(module.checks).should_receive('repository_enabled_for_checks').and_return(False)
flexmock(borgmatic.actions.check).should_receive('run_check').never()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'check': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_export_tar_action():
def test_run_actions_runs_extract():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_export_tar).should_receive('export_tar_archive')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'export-tar': flexmock(
repository=flexmock(),
archive=flexmock(),
paths=flexmock(),
destination=flexmock(),
tar_filter=flexmock(),
list_files=flexmock(),
strip_components=flexmock(),
),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.extract).should_receive('run_extract').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'extract': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_mount_action():
def test_run_actions_runs_export_tar():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_mount).should_receive('mount_archive')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'mount': flexmock(
repository=flexmock(),
archive=flexmock(),
mount_point=flexmock(),
paths=flexmock(),
foreground=flexmock(),
options=flexmock(),
),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.export_tar).should_receive('run_export_tar').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'export-tar': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_rlist_action():
def test_run_actions_runs_mount():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_rlist).should_receive('list_repository')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'rlist': flexmock(repository=flexmock(), json=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.mount).should_receive('run_mount').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'mount': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_list_action():
def test_run_actions_runs_restore():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_rlist).should_receive('resolve_archive_name').and_return(flexmock())
flexmock(module.borg_list).should_receive('list_archive')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'list': flexmock(repository=flexmock(), archive=flexmock(), json=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.restore).should_receive('run_restore').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'restore': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_rinfo_action():
def test_run_actions_runs_rlist():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_rinfo).should_receive('display_repository_info')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'rinfo': flexmock(repository=flexmock(), json=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
expected = flexmock()
flexmock(borgmatic.actions.rlist).should_receive('run_rlist').and_yield(expected).once()
list(
result = tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'rlist': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
assert result == (expected,)
def test_run_actions_runs_list():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.command).should_receive('execute_hook')
expected = flexmock()
flexmock(borgmatic.actions.list).should_receive('run_list').and_yield(expected).once()
result = tuple(
module.run_actions(
arguments={'global': flexmock(dry_run=False), 'list': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
assert result == (expected,)
def test_run_actions_runs_rinfo():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.command).should_receive('execute_hook')
expected = flexmock()
flexmock(borgmatic.actions.rinfo).should_receive('run_rinfo').and_yield(expected).once()
result = tuple(
module.run_actions(
arguments={'global': flexmock(dry_run=False), 'rinfo': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
assert result == (expected,)
def test_run_actions_runs_info():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.command).should_receive('execute_hook')
expected = flexmock()
flexmock(borgmatic.actions.info).should_receive('run_info').and_yield(expected).once()
result = tuple(
module.run_actions(
arguments={'global': flexmock(dry_run=False), 'info': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
assert result == (expected,)
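# Inferred from these assertions: run_actions() is a generator, which is why
# the tests consume it with tuple(); only the JSON-capable actions (create,
# rlist, list, rinfo, info) yield results, so the remaining action tests
# assert nothing about the returned values.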
def test_run_actions_runs_break_lock():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.break_lock).should_receive('run_break_lock').once()
tuple(
module.run_actions(
arguments={'global': flexmock(dry_run=False), 'break-lock': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_info_action():
def test_run_actions_runs_borg():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_rlist).should_receive('resolve_archive_name').and_return(flexmock())
flexmock(module.borg_info).should_receive('display_archives_info')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'info': flexmock(repository=flexmock(), archive=flexmock(), json=flexmock()),
}
flexmock(module.command).should_receive('execute_hook')
flexmock(borgmatic.actions.borg).should_receive('run_borg').once()
list(
tuple(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
arguments={'global': flexmock(dry_run=False), 'borg': flexmock()},
config_filename=flexmock(),
location={'repositories': []},
storage=flexmock(),
retention=flexmock(),
consistency=flexmock(),
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_break_lock_action():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_break_lock).should_receive('break_lock')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'break-lock': flexmock(repository=flexmock()),
}
list(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
repository_path='repo',
)
)
def test_run_actions_does_not_raise_for_borg_action():
flexmock(module).should_receive('add_custom_log_levels')
flexmock(module.logger).answer = lambda message: None
flexmock(module.validate).should_receive('repositories_match').and_return(True)
flexmock(module.borg_rlist).should_receive('resolve_archive_name').and_return(flexmock())
flexmock(module.borg_borg).should_receive('run_arbitrary_borg')
arguments = {
'global': flexmock(monitoring_verbosity=1, dry_run=False),
'borg': flexmock(repository=flexmock(), archive=flexmock(), options=flexmock()),
}
list(
module.run_actions(
arguments=arguments,
config_filename='test.yaml',
location={'repositories': ['repo']},
storage={},
retention={},
consistency={},
hooks={},
local_path=None,
remote_path=None,
local_borg_version=None,
local_path=flexmock(),
remote_path=flexmock(),
local_borg_version=flexmock(),
repository_path='repo',
)
)

View file

@@ -34,169 +34,87 @@ def test_database_names_to_dump_queries_mysql_for_database_names():
assert names == ('foo', 'bar')
def test_dump_databases_runs_mysqldump_for_each_database():
def test_dump_databases_dumps_each_database():
databases = [{'name': 'foo'}, {'name': 'bar'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--add-drop-database',
'--databases',
name,
'>',
'databases/localhost/{}'.format(name),
),
shell=True,
extra_environment=None,
run_to_completion=False,
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
dry_run=object,
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes
def test_dump_databases_with_dry_run_skips_mysqldump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
def test_dump_databases_dumps_with_password():
database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'}
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
module.dump_databases(databases, 'test.yaml', {}, dry_run=True)
def test_dump_databases_runs_mysqldump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--add-drop-database',
'--host',
'database.example.org',
'--port',
'5433',
'--protocol',
'tcp',
'--databases',
'foo',
'>',
'databases/database.example.org/foo',
),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mysqldump_with_username_and_password():
databases = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--add-drop-database',
'--user',
'root',
'--databases',
'foo',
'>',
'databases/localhost/foo',
),
shell=True,
flexmock(module).should_receive('execute_dump_command').with_args(
database=database,
log_prefix=object,
dump_path=object,
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
run_to_completion=False,
dry_run=object,
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
assert module.dump_databases([database], 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mysqldump_with_options():
databases = [{'name': 'foo', 'options': '--stuff=such'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',))
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--stuff=such',
'--add-drop-database',
'--databases',
'foo',
'>',
'databases/localhost/foo',
),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_runs_mysqldump_for_all_databases():
def test_dump_databases_dumps_all_databases_at_once():
databases = [{'name': 'all'}]
process = flexmock()
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--add-drop-database',
'--databases',
'foo',
'bar',
'>',
'databases/localhost/all',
),
shell=True,
extra_environment=None,
run_to_completion=False,
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': 'all'},
log_prefix=object,
dump_path=object,
database_names=('foo', 'bar'),
extra_environment=object,
dry_run=object,
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process]
def test_dump_databases_dumps_all_databases_separately_when_format_configured():
databases = [{'name': 'all', 'format': 'sql'}]
processes = [flexmock(), flexmock()]
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar'))
for name, process in zip(('foo', 'bar'), processes):
flexmock(module).should_receive('execute_dump_command').with_args(
database={'name': name, 'format': 'sql'},
log_prefix=object,
dump_path=object,
database_names=(name,),
extra_environment=object,
dry_run=object,
dry_run_label=object,
).and_return(process).once()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes
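# A plausible sketch of the branching these tests exercise (an assumption
# based on the expectations above, not the verbatim borgmatic code): when the
# configured name is "all" and a dump "format" is set, each server database
# gets its own execute_dump_command() call with the "all" name swapped out;
# otherwise every name goes into one combined dump.
def split_all_dumps_sketch(database, names, dump_one):
    # dump_one stands in for mysql.execute_dump_command in this sketch.
    if database['name'] == 'all' and database.get('format'):
        return [
            dump_one(database={**database, 'name': name}, database_names=(name,))
            for name in names
        ]
    return [dump_one(database=database, database_names=names)]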
def test_database_names_to_dump_runs_mysql_with_list_options():
database = {'name': 'all', 'list_options': '--defaults-extra-file=my.cnf'}
flexmock(module).should_receive('execute_command_and_capture_output').with_args(
@@ -214,6 +132,168 @@ def test_database_names_to_dump_runs_mysql_with_list_options():
assert module.database_names_to_dump(database, None, 'test.yaml', '') == ('foo', 'bar')
def test_execute_dump_command_runs_mysqldump():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
('mysqldump', '--add-drop-database', '--databases', 'foo', '>', 'dump',),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=False,
dry_run_label='',
)
== process
)
def test_execute_dump_command_runs_mysqldump_with_hostname_and_port():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
(
'mysqldump',
'--add-drop-database',
'--host',
'database.example.org',
'--port',
'5433',
'--protocol',
'tcp',
'--databases',
'foo',
'>',
'dump',
),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert (
module.execute_dump_command(
database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=False,
dry_run_label='',
)
== process
)
def test_execute_dump_command_runs_mysqldump_with_username_and_password():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
('mysqldump', '--add-drop-database', '--user', 'root', '--databases', 'foo', '>', 'dump',),
shell=True,
extra_environment={'MYSQL_PWD': 'trustsome1'},
run_to_completion=False,
).and_return(process).once()
assert (
module.execute_dump_command(
database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment={'MYSQL_PWD': 'trustsome1'},
dry_run=False,
dry_run_label='',
)
== process
)
def test_execute_dump_command_runs_mysqldump_with_options():
process = flexmock()
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
('mysqldump', '--stuff=such', '--add-drop-database', '--databases', 'foo', '>', 'dump',),
shell=True,
extra_environment=None,
run_to_completion=False,
).and_return(process).once()
assert (
module.execute_dump_command(
database={'name': 'foo', 'options': '--stuff=such'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=False,
dry_run_label='',
)
== process
)
def test_execute_dump_command_with_duplicate_dump_skips_mysqldump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=True,
dry_run_label='SO DRY',
)
is None
)
def test_execute_dump_command_with_dry_run_skips_mysqldump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').never()
assert (
module.execute_dump_command(
database={'name': 'foo'},
log_prefix='log',
dump_path=flexmock(),
database_names=('foo',),
extra_environment=None,
dry_run=True,
dry_run_label='SO DRY',
)
is None
)
def test_dump_databases_errors_for_missing_all_databases():
databases = [{'name': 'all'}]
process = flexmock()

View file

@@ -56,6 +56,7 @@ def test_dump_databases_runs_pg_dump_for_each_database():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
for name, process in zip(('foo', 'bar'), processes):
@@ -79,7 +80,7 @@ def test_dump_databases_runs_pg_dump_for_each_database():
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes
def test_dump_databases_runs_raises_when_no_database_names_to_dump():
def test_dump_databases_raises_when_no_database_names_to_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
@@ -89,6 +90,23 @@ def test_dump_databases_runs_raises_when_no_database_names_to_dump():
module.dump_databases(databases, 'test.yaml', {}, dry_run=False)
def test_dump_databases_with_duplicate_dump_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
flexmock(module).should_receive('make_dump_path').and_return('')
flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return(
('bar',)
)
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(True)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == []
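# Inferred from the os.path.exists expectations added throughout this file:
# dump_databases now skips any database whose dump file already exists at the
# computed path, which the test above exercises by making exists() return
# True and asserting that no pipe is created and no command runs.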
def test_dump_databases_with_dry_run_skips_pg_dump():
databases = [{'name': 'foo'}, {'name': 'bar'}]
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'})
@@ -99,6 +117,7 @@ def test_dump_databases_with_dry_run_skips_pg_dump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
).and_return('databases/localhost/bar')
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
flexmock(module).should_receive('execute_command').never()
@@ -114,6 +133,7 @@ def test_dump_databases_runs_pg_dump_with_hostname_and_port():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/database.example.org/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
@@ -151,6 +171,7 @@ def test_dump_databases_runs_pg_dump_with_username_and_password():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
@@ -207,6 +228,7 @@ def test_dump_databases_runs_pg_dump_with_directory_format():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_parent_directory_for_dump')
flexmock(module.dump).should_receive('create_named_pipe_for_dump').never()
@@ -239,6 +261,7 @@ def test_dump_databases_runs_pg_dump_with_options():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
@@ -271,6 +294,7 @@ def test_dump_databases_runs_pg_dumpall_for_all_databases():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/all'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(
@@ -292,6 +316,7 @@ def test_dump_databases_runs_non_default_pg_dump():
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(
'databases/localhost/foo'
)
flexmock(module.os.path).should_receive('exists').and_return(False)
flexmock(module.dump).should_receive('create_named_pipe_for_dump')
flexmock(module).should_receive('execute_command').with_args(