First crack at MySQL/MariaDB database restore (#228).
parent 4ed43ae4dc
commit ded042d8cc

12 changed files with 354 additions and 157 deletions
@@ -18,7 +18,7 @@ from borgmatic.borg import list as borg_list
from borgmatic.borg import prune as borg_prune
from borgmatic.commands.arguments import parse_arguments
from borgmatic.config import checks, collect, convert, validate
from borgmatic.hooks import command, cronhub, cronitor, healthchecks, mysql, postgresql
from borgmatic.hooks import command, cronhub, cronitor, dispatch, dump, healthchecks
from borgmatic.logger import configure_logging, should_do_markup
from borgmatic.signals import configure_signals
from borgmatic.verbosity import verbosity_to_log_level

@@ -69,11 +69,12 @@ def run_configuration(config_filename, config, arguments):
'pre-backup',
global_arguments.dry_run,
)
postgresql.dump_databases(
hooks.get('postgresql_databases'), config_filename, global_arguments.dry_run
)
mysql.dump_databases(
hooks.get('mysql_databases'), config_filename, global_arguments.dry_run
dispatch.call_hooks(
'dump_databases',
hooks,
config_filename,
dump.DATABASE_HOOK_NAMES,
global_arguments.dry_run,
)
except (OSError, CalledProcessError) as error:
encountered_error = error
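
The separate postgresql/mysql dump calls above collapse into a single dispatch across dump.DATABASE_HOOK_NAMES. A rough sketch of the resulting call shape, using a purely hypothetical hooks dict (dump_databases shells out to pg_dump/mysqldump, so this only illustrates the arguments rather than something to run against real servers):

    from borgmatic.hooks import dispatch, dump

    # Hypothetical "hooks" section of a parsed borgmatic configuration.
    hooks = {
        'postgresql_databases': [{'name': 'users'}],
        'mysql_databases': [{'name': 'orders', 'hostname': 'db.example.org'}],
    }

    # Each configured database hook gets its dump_databases(databases, log_prefix, dry_run)
    # function called with its own configuration; unconfigured hooks are skipped.
    dispatch.call_hooks(
        'dump_databases', hooks, 'config.yaml', dump.DATABASE_HOOK_NAMES, False
    )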

@@ -104,11 +105,12 @@ def run_configuration(config_filename, config, arguments):

if 'create' in arguments and not encountered_error:
try:
postgresql.remove_database_dumps(
hooks.get('postgresql_databases'), config_filename, global_arguments.dry_run
)
mysql.remove_database_dumps(
hooks.get('mysql_databases'), config_filename, global_arguments.dry_run
dispatch.call_hooks(
'remove_database_dumps',
hooks,
config_filename,
dump.DATABASE_HOOK_NAMES,
global_arguments.dry_run,
)
command.execute_hook(
hooks.get('after_backup'),

@@ -260,12 +262,20 @@ def run_actions(
restore_names = []

# Extract dumps for the named databases from the archive.
dump_patterns = postgresql.make_database_dump_patterns(restore_names)
dump_patterns = dispatch.call_hooks(
'make_database_dump_patterns',
hooks,
repository,
dump.DATABASE_HOOK_NAMES,
restore_names,
)
borg_extract.extract_archive(
global_arguments.dry_run,
repository,
arguments['restore'].archive,
postgresql.convert_glob_patterns_to_borg_patterns(dump_patterns),
dump.convert_glob_patterns_to_borg_patterns(
[pattern for patterns in dump_patterns.values() for pattern in patterns]
),
location,
storage,
local_path=local_path,

@@ -274,17 +284,35 @@ def run_actions(
progress=arguments['restore'].progress,
)

# Map the restore names to the corresponding database configurations.
databases = list(
postgresql.get_database_configurations(
hooks.get('postgresql_databases'),
restore_names or postgresql.get_database_names_from_dumps(dump_patterns),
# Map the restore names or detected dumps to the corresponding database configurations.
# TODO: Need to filter restore_names by database type? Maybe take a database --type argument to disambiguate.
restore_databases = {
hook_name: list(
dump.get_database_configurations(
hooks.get(hook_name),
restore_names
or dump.get_database_names_from_dumps(dump_patterns[hook_name]),
)
)
)
for hook_name in dump.DATABASE_HOOK_NAMES
if hook_name in hooks
}

# Finally, restore the databases and cleanup the dumps.
postgresql.restore_database_dumps(databases, repository, global_arguments.dry_run)
postgresql.remove_database_dumps(databases, repository, global_arguments.dry_run)
dispatch.call_hooks(
'restore_database_dumps',
restore_databases,
repository,
dump.DATABASE_HOOK_NAMES,
global_arguments.dry_run,
)
dispatch.call_hooks(
'remove_database_dumps',
restore_databases,
repository,
dump.DATABASE_HOOK_NAMES,
global_arguments.dry_run,
)
if 'list' in arguments:
if arguments['list'].repository is None or repository == arguments['list'].repository:
logger.info('{}: Listing archives'.format(repository))
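
The restore path in run_actions() is now keyed by hook name: dispatch.call_hooks() returns a dict of per-hook dump patterns, which get flattened and converted to Borg patterns for extraction, and restore_databases maps each hook name to the database configurations to restore. A sketch of those intermediate shapes, with hypothetical paths (the real ones come from each hook's DUMP_PATH):

    from borgmatic.hooks import dump

    # Hypothetical result of dispatch.call_hooks('make_database_dump_patterns', ...) above.
    dump_patterns = {
        'postgresql_databases': ['/root/.borgmatic/postgresql_databases/*/users'],
        'mysql_databases': ['/root/.borgmatic/mysql_databases/*/orders'],
    }

    # Flatten the per-hook lists and turn the shell globs into Borg archive patterns,
    # as done before calling borg_extract.extract_archive() above.
    borg_patterns = dump.convert_glob_patterns_to_borg_patterns(
        [pattern for patterns in dump_patterns.values() for pattern in patterns]
    )
    # borg_patterns == ['sh:root/.borgmatic/postgresql_databases/*/users',
    #                   'sh:root/.borgmatic/mysql_databases/*/orders']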

@@ -61,6 +61,7 @@ def execute_command(
full_command,
output_log_level=logging.INFO,
output_file=None,
input_file=None,
shell=False,
extra_environment=None,
working_directory=None,

@@ -70,10 +71,11 @@ def execute_command(
Execute the given command (a sequence of command/argument strings) and log its output at the
given log level. If output log level is None, instead capture and return the output. If an
open output file object is given, then write stdout to the file and only log stderr (but only
if an output log level is set). If shell is True, execute the command within a shell. If an
extra environment dict is given, then use it to augment the current environment, and pass the
result into the command. If a working directory is given, use that as the present working
directory when running the command.
if an output log level is set). If an open input file object is given, then read stdin from the
file. If shell is True, execute the command within a shell. If an extra environment dict is
given, then use it to augment the current environment, and pass the result into the command. If
a working directory is given, use that as the present working directory when running the
command.

Raise subprocess.CalledProcessError if an error occurs while running the command.
'''

@@ -88,6 +90,7 @@ def execute_command(
else:
process = subprocess.Popen(
full_command,
stdin=input_file,
stdout=output_file or subprocess.PIPE,
stderr=subprocess.PIPE if output_file else subprocess.STDOUT,
shell=shell,
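
The new input_file parameter is what lets the restore hooks stream a dump into a command's stdin while keeping credentials off the command line. A minimal sketch of a call, assuming execute_command lives in borgmatic's execute module and using placeholder path and password values:

    from borgmatic.execute import execute_command

    # Hypothetical restore invocation: mysql reads the dump from stdin, and the
    # password travels via the process environment instead of an argument.
    with open('/root/.borgmatic/mysql_databases/localhost/orders') as dump_file:
        execute_command(
            ('mysql', '--batch'),
            input_file=dump_file,
            extra_environment={'MYSQL_PWD': 'example-password'},
        )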

borgmatic/hooks/dispatch.py (new file, 55 lines)
@@ -0,0 +1,55 @@
import logging

from borgmatic.hooks import mysql, postgresql

logger = logging.getLogger(__name__)

HOOK_NAME_TO_MODULE = {'postgresql_databases': postgresql, 'mysql_databases': mysql}


def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
'''
Given the hooks configuration dict and a prefix to use in log entries, call the requested
function of the Python module corresponding to the given hook name. Supply that call with the
configuration for this hook, the log prefix, and any given args and kwargs. Return any return
value.

If the hook name is not present in the hooks configuration, then bail without calling anything.

Raise ValueError if the hook name is unknown.
Raise AttributeError if the function name is not found in the module.
Raise anything else that the called function raises.
'''
config = hooks.get(hook_name)
if not config:
logger.debug('{}: No {} hook configured.'.format(log_prefix, hook_name))
return

try:
module = HOOK_NAME_TO_MODULE[hook_name]
except KeyError:
raise ValueError('Unknown hook name: {}'.format(hook_name))

logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
return getattr(module, function_name)(config, log_prefix, *args, **kwargs)


def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs):
'''
Given the hooks configuration dict and a prefix to use in log entries, call the requested
function of the Python module corresponding to each given hook name. Supply each call with the
configuration for that hook, the log prefix, and any given args and kwargs. Collect any return
values into a dict from hook name to return value.

If the hook name is not present in the hooks configuration, then don't call the function for it,
and omit it from the return values.

Raise ValueError if the hook name is unknown.
Raise AttributeError if the function name is not found in the module.
Raise anything else that a called function raises. An error stops calls to subsequent functions.
'''
return {
hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs)
for hook_name in hook_names
if hook_name in hooks
}
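
This dispatch module is the seam that keeps the rest of borgmatic database-agnostic: callers name a function plus a set of hook names, and only hooks actually present in the configuration are called. A small, side-effect-free example of the semantics (configuration values are hypothetical):

    from borgmatic.hooks import dispatch, dump

    hooks = {
        'postgresql_databases': [{'name': 'users'}],
        'healthchecks': 'https://hc-ping.com/example',  # not a database hook, so ignored below
    }

    # Only hook names that are both requested and configured get dispatched, so this
    # returns a dict with a single 'postgresql_databases' key mapped to its dump patterns.
    patterns = dispatch.call_hooks(
        'make_database_dump_patterns', hooks, 'config.yaml', dump.DATABASE_HOOK_NAMES, []
    )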

@@ -1,8 +1,11 @@
import glob
import logging
import os

logger = logging.getLogger(__name__)

DATABASE_HOOK_NAMES = ('postgresql_databases', 'mysql_databases')


def make_database_dump_filename(dump_path, name, hostname=None):
'''

@@ -52,3 +55,48 @@ def remove_database_dumps(dump_path, databases, database_type_name, log_prefix,

if len(os.listdir(dump_path)) == 0:
os.rmdir(dump_path)


def convert_glob_patterns_to_borg_patterns(patterns):
'''
Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
patterns like "sh:etc/*".
'''
return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]


def get_database_names_from_dumps(patterns):
'''
Given a sequence of database dump patterns, find the corresponding database dumps on disk and
return the database names from their filenames.
'''
return [os.path.basename(dump_path) for pattern in patterns for dump_path in glob.glob(pattern)]


def get_database_configurations(databases, names):
'''
Given the full database configuration dicts as per the configuration schema, and a sequence of
database names, filter down and yield the configuration for just the named databases.
Additionally, if a database configuration is named "all", project out that configuration for
each named database.

Raise ValueError if one of the database names cannot be matched to a database in borgmatic's
database configuration.
'''
named_databases = {database['name']: database for database in databases}

for name in names:
database = named_databases.get(name)
if database:
yield database
continue

if 'all' in named_databases:
yield {**named_databases['all'], **{'name': name}}
continue

raise ValueError(
'Cannot restore database "{}", as it is not defined in borgmatic\'s configuration'.format(
name
)
)
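
These helpers used to live in the PostgreSQL hook and are now shared, so both database hooks resolve restore names the same way. For instance, the special "all" configuration is projected onto every requested name that has no entry of its own (values below are illustrative):

    from borgmatic.hooks import dump

    databases = [
        {'name': 'users', 'hostname': 'db1.example.org'},
        {'name': 'all', 'hostname': 'db2.example.org'},
    ]

    # 'users' matches its own configuration; 'orders' falls back to the 'all' entry
    # with its name substituted in.
    list(dump.get_database_configurations(databases, ['users', 'orders']))
    # == [{'name': 'users', 'hostname': 'db1.example.org'},
    #     {'name': 'orders', 'hostname': 'db2.example.org'}]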

@@ -14,10 +14,6 @@ def dump_databases(databases, log_prefix, dry_run):
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. If this is a dry run, then don't actually dump anything.
'''
if not databases:
logger.debug('{}: No MySQL databases configured'.format(log_prefix))
return

dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))

@@ -48,10 +44,51 @@ def dump_databases(databases, log_prefix, dry_run):
)


def remove_database_dumps(databases, log_prefix, dry_run):
def remove_database_dumps(databases, log_prefix, dry_run): # pragma: no cover
'''
Remove the database dumps for the given databases. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the log prefix in
any log entries. If this is a dry run, then don't actually remove anything.
'''
dump.remove_database_dumps(DUMP_PATH, databases, 'MySQL', log_prefix, dry_run)


def make_database_dump_patterns(databases, log_prefix, names):
'''
Given a sequence of configuration dicts, a prefix to log with, and a sequence of database
names to match, return the corresponding glob patterns to match the database dumps in an
archive. An empty sequence of names indicates that the patterns should match all dumps.
'''
return [
dump.make_database_dump_filename(DUMP_PATH, name, hostname='*') for name in (names or ['*'])
]


def restore_database_dumps(databases, log_prefix, dry_run):
'''
Restore the given MySQL/MariaDB databases from disk. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. If this is a dry run, then don't actually restore anything.
'''
dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

for database in databases:
dump_filename = dump.make_database_dump_filename(
DUMP_PATH, database['name'], database.get('hostname')
)
restore_command = (
('mysql', '--batch')
+ (('--host', database['hostname']) if 'hostname' in database else ())
+ (('--port', str(database['port'])) if 'port' in database else ())
+ (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
+ (('--user', database['username']) if 'username' in database else ())
)
extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None

logger.debug(
'{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
)
if not dry_run:
execute_command(
restore_command, input_file=open(dump_filename), extra_environment=extra_environment
)
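
For reference, this is how the restore command above comes together for one hypothetical database entry; every value is an example, and in the real call the dump file is additionally fed to mysql on stdin via execute_command's input_file:

    # Hypothetical configuration entry for a single MySQL/MariaDB database.
    database = {
        'name': 'orders',
        'hostname': 'db.example.org',
        'port': 3307,
        'username': 'app',
        'password': 'example-password',
    }

    # Mirrors the tuple concatenation in restore_database_dumps() above.
    restore_command = (
        ('mysql', '--batch')
        + (('--host', database['hostname']) if 'hostname' in database else ())
        + (('--port', str(database['port'])) if 'port' in database else ())
        + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ())
        + (('--user', database['username']) if 'username' in database else ())
    )
    # restore_command == ('mysql', '--batch', '--host', 'db.example.org', '--port', '3307',
    #                     '--protocol', 'tcp', '--user', 'app')
    # The password is passed separately, as MYSQL_PWD in the subprocess environment.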

@@ -1,4 +1,3 @@
import glob
import logging
import os

@@ -15,10 +14,6 @@ def dump_databases(databases, log_prefix, dry_run):
one dict describing each database as per the configuration schema. Use the given log prefix in
any log entries. If this is a dry run, then don't actually dump anything.
'''
if not databases:
logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
return

dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))

@@ -49,7 +44,7 @@ def dump_databases(databases, log_prefix, dry_run):
execute_command(command, extra_environment=extra_environment)


def remove_database_dumps(databases, log_prefix, dry_run):
def remove_database_dumps(databases, log_prefix, dry_run): # pragma: no cover
'''
Remove the database dumps for the given databases. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the log prefix in

@@ -58,72 +53,23 @@ def remove_database_dumps(databases, log_prefix, dry_run):
dump.remove_database_dumps(DUMP_PATH, databases, 'PostgreSQL', log_prefix, dry_run)


def make_database_dump_patterns(names):
def make_database_dump_patterns(databases, log_prefix, names):
'''
Given a sequence of database names, return the corresponding glob patterns to match the database
dumps in an archive. An empty sequence of names indicates that the patterns should match all
dumps.
Given a sequence of configuration dicts, a prefix to log with, and a sequence of database
names to match, return the corresponding glob patterns to match the database dumps in an
archive. An empty sequence of names indicates that the patterns should match all dumps.
'''
return [
dump.make_database_dump_filename(DUMP_PATH, name, hostname='*') for name in (names or ['*'])
]


def convert_glob_patterns_to_borg_patterns(patterns):
'''
Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
patterns like "sh:etc/*".
'''
return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]


def get_database_names_from_dumps(patterns):
'''
Given a sequence of database dump patterns, find the corresponding database dumps on disk and
return the database names from their filenames.
'''
return [os.path.basename(dump_path) for pattern in patterns for dump_path in glob.glob(pattern)]


def get_database_configurations(databases, names):
'''
Given the full database configuration dicts as per the configuration schema, and a sequence of
database names, filter down and yield the configuration for just the named databases.
Additionally, if a database configuration is named "all", project out that configuration for
each named database.

Raise ValueError if one of the database names cannot be matched to a database in borgmatic's
database configuration.
'''
named_databases = {database['name']: database for database in databases}

for name in names:
database = named_databases.get(name)
if database:
yield database
continue

if 'all' in named_databases:
yield {**named_databases['all'], **{'name': name}}
continue

raise ValueError(
'Cannot restore database "{}", as it is not defined in borgmatic\'s configuration'.format(
name
)
)


def restore_database_dumps(databases, log_prefix, dry_run):
'''
Restore the given PostgreSQL databases from disk. The databases are supplied as a sequence of
dicts, one dict describing each database as per the configuration schema. Use the given log
prefix in any log entries. If this is a dry run, then don't actually restore anything.
'''
if not databases:
logger.debug('{}: No PostgreSQL databases configured'.format(log_prefix))
return

dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

for database in databases:

@@ -23,13 +23,10 @@ def test_run_configuration_runs_actions_for_each_repository():
def test_run_configuration_executes_hooks_for_create_action():
flexmock(module.borg_environment).should_receive('initialize')
flexmock(module.command).should_receive('execute_hook').twice()
flexmock(module.postgresql).should_receive('dump_databases').once()
flexmock(module.mysql).should_receive('dump_databases').once()
flexmock(module.dispatch).should_receive('call_hooks').twice()
flexmock(module.healthchecks).should_receive('ping_healthchecks').twice()
flexmock(module.cronitor).should_receive('ping_cronitor').twice()
flexmock(module.cronhub).should_receive('ping_cronhub').twice()
flexmock(module.postgresql).should_receive('remove_database_dumps').once()
flexmock(module.mysql).should_receive('remove_database_dumps').once()
flexmock(module).should_receive('run_actions').and_return([])
config = {'location': {'repositories': ['foo']}}
arguments = {'global': flexmock(dry_run=False), 'create': flexmock()}

@@ -40,8 +37,7 @@ def test_run_configuration_executes_hooks_for_create_action():
def test_run_configuration_logs_actions_error():
flexmock(module.borg_environment).should_receive('initialize')
flexmock(module.command).should_receive('execute_hook')
flexmock(module.postgresql).should_receive('dump_databases')
flexmock(module.mysql).should_receive('dump_databases')
flexmock(module.dispatch).should_receive('call_hooks')
flexmock(module.healthchecks).should_receive('ping_healthchecks')
flexmock(module.cronitor).should_receive('ping_cronitor')
flexmock(module.cronhub).should_receive('ping_cronhub')

@@ -75,6 +71,10 @@ def test_run_configuration_logs_post_hook_error():
flexmock(module.command).should_receive('execute_hook').and_return(None).and_raise(
OSError
).and_return(None)
flexmock(module.dispatch).should_receive('call_hooks')
flexmock(module.healthchecks).should_receive('ping_healthchecks')
flexmock(module.cronitor).should_receive('ping_cronitor')
flexmock(module.cronhub).should_receive('ping_cronhub')
expected_results = [flexmock()]
flexmock(module).should_receive('make_error_log_records').and_return(expected_results)
flexmock(module).should_receive('run_actions').and_return([])

tests/unit/hooks/test_dispatch.py (new file, 68 lines)
@@ -0,0 +1,68 @@
import sys

import pytest
from flexmock import flexmock

from borgmatic.hooks import dispatch as module


def hook_function(config, log_prefix, thing, value):
'''
This test function gets mocked out below.
'''
pass


def test_call_hook_invokes_module_function_with_arguments_and_returns_value():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
expected_return_value = flexmock()
test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
flexmock(test_module).should_receive('hook_function').with_args(
hooks['super_hook'], 'prefix', 55, value=66
).and_return(expected_return_value).once()

return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)

assert return_value == expected_return_value


def test_call_hook_without_hook_config_skips_call():
hooks = {'other_hook': flexmock()}
test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module}
flexmock(test_module).should_receive('hook_function').never()

module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)


def test_call_hook_without_corresponding_module_raises():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
test_module = sys.modules[__name__]
flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module}
flexmock(test_module).should_receive('hook_function').never()

with pytest.raises(ValueError):
module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66)


def test_call_hooks_calls_each_hook_and_collects_return_values():
hooks = {'super_hook': flexmock(), 'other_hook': flexmock()}
expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return(
expected_return_values['super_hook']
).and_return(expected_return_values['other_hook'])

return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55)

assert return_values == expected_return_values


def test_call_hooks_calls_skips_return_values_for_unconfigured_hooks():
hooks = {'super_hook': flexmock()}
expected_return_values = {'super_hook': flexmock()}
flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook'])

return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55)

assert return_values == expected_return_values

@@ -52,3 +52,50 @@ def test_remove_database_dumps_with_dry_run_skips_removal():

def test_remove_database_dumps_without_databases_does_not_raise():
module.remove_database_dumps('databases', [], 'SuperDB', 'test.yaml', dry_run=False)


def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():
assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar']


def test_get_database_names_from_dumps_gets_names_from_filenames_matching_globs():
flexmock(module.glob).should_receive('glob').and_return(
('databases/localhost/foo',)
).and_return(('databases/localhost/bar',)).and_return(())

assert module.get_database_names_from_dumps(
('databases/*/foo', 'databases/*/bar', 'databases/*/baz')
) == ['foo', 'bar']


def test_get_database_configurations_only_produces_named_databases():
databases = [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'bar', 'hostname': 'example.com'},
{'name': 'baz', 'hostname': 'example.org'},
]

assert list(module.get_database_configurations(databases, ('foo', 'baz'))) == [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'baz', 'hostname': 'example.org'},
]


def test_get_database_configurations_matches_all_database():
databases = [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'all', 'hostname': 'example.com'},
]

assert list(module.get_database_configurations(databases, ('foo', 'bar', 'baz'))) == [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'bar', 'hostname': 'example.com'},
{'name': 'baz', 'hostname': 'example.com'},
]


def test_get_database_configurations_with_unknown_database_name_raises():
databases = [{'name': 'foo', 'hostname': 'example.org'}]

with pytest.raises(ValueError):
list(module.get_database_configurations(databases, ('foo', 'bar')))

@@ -35,10 +35,6 @@ def test_dump_databases_with_dry_run_skips_mysqldump():
module.dump_databases(databases, 'test.yaml', dry_run=True)


def test_dump_databases_without_databases_does_not_raise():
module.dump_databases([], 'test.yaml', dry_run=False)


def test_dump_databases_runs_mysqldump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
output_file = flexmock()

@@ -1,4 +1,3 @@
import pytest
from flexmock import flexmock

from borgmatic.hooks import postgresql as module

@@ -40,10 +39,6 @@ def test_dump_databases_with_dry_run_skips_pg_dump():
module.dump_databases(databases, 'test.yaml', dry_run=True)


def test_dump_databases_without_databases_does_not_raise():
module.dump_databases([], 'test.yaml', dry_run=False)


def test_dump_databases_runs_pg_dump_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(

@@ -167,7 +162,7 @@ def test_make_database_dump_patterns_converts_names_to_glob_paths():
'databases/*/foo'
).and_return('databases/*/bar')

assert module.make_database_dump_patterns(('foo', 'bar')) == [
assert module.make_database_dump_patterns(flexmock(), flexmock(), ('foo', 'bar')) == [
'databases/*/foo',
'databases/*/bar',
]

@@ -178,54 +173,7 @@ def test_make_database_dump_patterns_treats_empty_names_as_matching_all_database
module.DUMP_PATH, '*', '*'
).and_return('databases/*/*')

assert module.make_database_dump_patterns(()) == ['databases/*/*']


def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash():
assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar']


def test_get_database_names_from_dumps_gets_names_from_filenames_matching_globs():
flexmock(module.glob).should_receive('glob').and_return(
('databases/localhost/foo',)
).and_return(('databases/localhost/bar',)).and_return(())

assert module.get_database_names_from_dumps(
('databases/*/foo', 'databases/*/bar', 'databases/*/baz')
) == ['foo', 'bar']


def test_get_database_configurations_only_produces_named_databases():
databases = [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'bar', 'hostname': 'example.com'},
{'name': 'baz', 'hostname': 'example.org'},
]

assert list(module.get_database_configurations(databases, ('foo', 'baz'))) == [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'baz', 'hostname': 'example.org'},
]


def test_get_database_configurations_matches_all_database():
databases = [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'all', 'hostname': 'example.com'},
]

assert list(module.get_database_configurations(databases, ('foo', 'bar', 'baz'))) == [
{'name': 'foo', 'hostname': 'example.org'},
{'name': 'bar', 'hostname': 'example.com'},
{'name': 'baz', 'hostname': 'example.com'},
]


def test_get_database_configurations_with_unknown_database_name_raises():
databases = [{'name': 'foo', 'hostname': 'example.org'}]

with pytest.raises(ValueError):
list(module.get_database_configurations(databases, ('foo', 'bar')))
assert module.make_database_dump_patterns(flexmock(), flexmock(), ()) == ['databases/*/*']


def test_restore_database_dumps_restores_each_database():

@@ -256,10 +204,6 @@ def test_restore_database_dumps_restores_each_database():
module.restore_database_dumps(databases, 'test.yaml', dry_run=False)


def test_restore_database_dumps_without_databases_does_not_raise():
module.restore_database_dumps({}, 'test.yaml', dry_run=False)


def test_restore_database_dumps_runs_pg_restore_with_hostname_and_port():
databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}]
flexmock(module.dump).should_receive('make_database_dump_filename').and_return(

@@ -47,6 +47,7 @@ def test_execute_command_calls_full_command():
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=None,
stdout=module.subprocess.PIPE,
stderr=module.subprocess.STDOUT,
shell=False,

@@ -66,6 +67,7 @@ def test_execute_command_calls_full_command_with_output_file():
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=None,
stdout=output_file,
stderr=module.subprocess.PIPE,
shell=False,

@@ -79,11 +81,32 @@ def test_execute_command_calls_full_command_with_output_file():
assert output is None


def test_execute_command_calls_full_command_with_input_file():
full_command = ['foo', 'bar']
input_file = flexmock()
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=input_file,
stdout=module.subprocess.PIPE,
stderr=module.subprocess.STDOUT,
shell=False,
env=None,
cwd=None,
).and_return(flexmock(stdout=None)).once()
flexmock(module).should_receive('log_output')

output = module.execute_command(full_command, input_file=input_file)

assert output is None


def test_execute_command_calls_full_command_with_shell():
full_command = ['foo', 'bar']
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=None,
stdout=module.subprocess.PIPE,
stderr=module.subprocess.STDOUT,
shell=True,

@@ -102,6 +125,7 @@ def test_execute_command_calls_full_command_with_extra_environment():
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=None,
stdout=module.subprocess.PIPE,
stderr=module.subprocess.STDOUT,
shell=False,

@@ -120,6 +144,7 @@ def test_execute_command_calls_full_command_with_working_directory():
flexmock(module.os, environ={'a': 'b'})
flexmock(module.subprocess).should_receive('Popen').with_args(
full_command,
stdin=None,
stdout=module.subprocess.PIPE,
stderr=module.subprocess.STDOUT,
shell=False,