2019-11-08 20:17:52 +01:00
|
|
|
import logging
|
|
|
|
|
2020-05-07 20:44:04 +02:00
|
|
|
from borgmatic.execute import execute_command, execute_command_with_processes
|
2019-11-08 20:53:27 +01:00
|
|
|
from borgmatic.hooks import dump
|
2019-11-08 20:17:52 +01:00
|
|
|
|
|
|
|
# Module-level logger named after this module, so log entries identify this hook.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
2019-12-11 01:04:34 +01:00
|
|
|
def make_dump_path(location_config):  # pragma: no cover
    '''
    Construct the database dump path for this hook from the given location configuration dict.
    '''
    borgmatic_source_directory = location_config.get('borgmatic_source_directory')

    return dump.make_database_dump_path(borgmatic_source_directory, 'mysql_databases')
|
|
|
|
|
|
|
|
|
2020-04-22 21:17:22 +02:00
|
|
|
# MySQL-managed system databases, excluded from "all" dumps because they cause
# problems during restore (see database_names_to_dump() below).
SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys')
|
|
|
|
|
|
|
|
|
|
|
|
def database_names_to_dump(database, extra_environment, log_prefix, dry_run_label):
    '''
    Given a database configuration dict, return the sequence of database names to dump.

    A requested name of "all" triggers a "show schemas" query against the configured host;
    the resulting names are returned with system databases filtered out, as those cause
    problems during restore.
    '''
    name = database['name']

    # Any name other than "all" is dumped as-is; no server query is needed.
    if name != 'all':
        return (name,)

    show_command = ['mysql']
    if 'list_options' in database:
        show_command.extend(database['list_options'].split(' '))
    if 'hostname' in database:
        show_command.extend(('--host', database['hostname']))
    if 'port' in database:
        show_command.extend(('--port', str(database['port'])))
    if 'hostname' in database or 'port' in database:
        show_command.extend(('--protocol', 'tcp'))
    if 'username' in database:
        show_command.extend(('--user', database['username']))
    show_command.extend(('--skip-column-names', '--batch', '--execute', 'show schemas'))

    logger.debug(
        '{}: Querying for "all" MySQL databases to dump{}'.format(log_prefix, dry_run_label)
    )
    show_output = execute_command(
        tuple(show_command), output_log_level=None, extra_environment=extra_environment
    )

    return tuple(
        line for line in show_output.strip().splitlines() if line not in SYSTEM_DATABASE_NAMES
    )
|
|
|
|
|
|
|
|
|
2019-12-11 01:04:34 +01:00
|
|
|
def dump_databases(databases, log_prefix, location_config, dry_run):
    '''
    Dump the given MySQL/MariaDB databases to named pipes. databases is a sequence of dicts,
    each describing one database as per the configuration schema. Log entries use the given
    log prefix, and the destination path is derived from the given location configuration
    dict.

    Return a sequence of subprocess.Popen instances for the dump processes, ready to spew to
    named pipes. On a dry run, dump nothing and return an empty sequence.
    '''
    dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''

    logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))

    processes = []

    for database in databases:
        requested_name = database['name']
        dump_filename = dump.make_database_dump_filename(
            make_dump_path(location_config), requested_name, database.get('hostname')
        )
        extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None

        dump_database_names = database_names_to_dump(
            database, extra_environment, log_prefix, dry_run_label
        )
        if not dump_database_names:
            raise ValueError('Cannot find any MySQL databases to dump.')

        dump_command = ['mysqldump']
        if 'options' in database:
            dump_command.extend(database['options'].split(' '))
        dump_command.append('--add-drop-database')
        if 'hostname' in database:
            dump_command.extend(('--host', database['hostname']))
        if 'port' in database:
            dump_command.extend(('--port', str(database['port'])))
        if 'hostname' in database or 'port' in database:
            dump_command.extend(('--protocol', 'tcp'))
        if 'username' in database:
            dump_command.extend(('--user', database['username']))
        dump_command.append('--databases')
        dump_command.extend(dump_database_names)
        # Use shell redirection rather than execute_command(output_file=open(...)) to prevent
        # the open() call on a named pipe from hanging the main borgmatic process.
        dump_command.extend(('>', dump_filename))

        logger.debug(
            '{}: Dumping MySQL database {} to {}{}'.format(
                log_prefix, requested_name, dump_filename, dry_run_label
            )
        )
        if dry_run:
            continue

        dump.create_named_pipe_for_dump(dump_filename)

        processes.append(
            execute_command(
                tuple(dump_command),
                shell=True,
                extra_environment=extra_environment,
                run_to_completion=False,
            )
        )

    return processes
|
2019-11-08 20:53:27 +01:00
|
|
|
|
|
|
|
|
2019-12-11 01:04:34 +01:00
|
|
|
def remove_database_dumps(databases, log_prefix, location_config, dry_run):  # pragma: no cover
    '''
    Remove every database dump file belonging to this hook, regardless of which databases are
    given. Log entries use the given log prefix, and the dump directory is derived from the
    given location configuration dict. On a dry run, don't actually remove anything.
    '''
    dump_path = make_dump_path(location_config)

    dump.remove_database_dumps(dump_path, 'MySQL', log_prefix, dry_run)
|
2019-11-12 06:59:30 +01:00
|
|
|
|
|
|
|
|
2020-05-07 21:14:27 +02:00
|
|
|
def make_database_dump_pattern(
    databases, log_prefix, location_config, name=None
):  # pragma: no cover
    '''
    Return a glob pattern matching the named database's dump within an archive, given a
    sequence of configuration dicts, a prefix to log with, and a location configuration dict.
    The hostname component is wildcarded so dumps from any host match.
    '''
    dump_path = make_dump_path(location_config)

    return dump.make_database_dump_filename(dump_path, name, hostname='*')
|
2019-11-12 06:59:30 +01:00
|
|
|
|
|
|
|
|
2020-05-07 20:44:04 +02:00
|
|
|
def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process):
    '''
    Restore a MySQL/MariaDB database from an extract stream. database_config is a one-element
    sequence whose single dict describes the database, as per the configuration schema. Log
    entries use the given log prefix. On a dry run, don't actually restore anything. The given
    active extract process (a subprocess.Popen instance) is triggered to produce the output
    that gets consumed.
    '''
    dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else ''

    # Exactly one database is expected per restore invocation.
    if len(database_config) != 1:
        raise ValueError('The database configuration value is invalid')

    database = database_config[0]

    restore_command = ['mysql', '--batch']
    if 'hostname' in database:
        restore_command.extend(('--host', database['hostname']))
    if 'port' in database:
        restore_command.extend(('--port', str(database['port'])))
    if 'hostname' in database or 'port' in database:
        restore_command.extend(('--protocol', 'tcp'))
    if 'username' in database:
        restore_command.extend(('--user', database['username']))

    extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None

    logger.debug(
        '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
    )
    if dry_run:
        return

    execute_command_with_processes(
        tuple(restore_command),
        [extract_process],
        output_log_level=logging.DEBUG,
        input_file=extract_process.stdout,
        extra_environment=extra_environment,
        borg_local_path=location_config.get('local_path', 'borg'),
    )
|