Merge branch 'master' of https://github.com/diivi/borgmatic into feat/tag-repos
commit 1bc003560a
64 changed files with 286 additions and 305 deletions
@@ -1,4 +1,5 @@
 const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
+const codeClipboard = require("eleventy-plugin-code-clipboard");
 const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language");
 const navigationPlugin = require("@11ty/eleventy-navigation");
 
@@ -6,6 +7,7 @@ module.exports = function(eleventyConfig) {
     eleventyConfig.addPlugin(pluginSyntaxHighlight);
     eleventyConfig.addPlugin(inclusiveLangPlugin);
     eleventyConfig.addPlugin(navigationPlugin);
+    eleventyConfig.addPlugin(codeClipboard);
 
     let markdownIt = require("markdown-it");
     let markdownItAnchor = require("markdown-it-anchor");

@@ -31,6 +33,7 @@ module.exports = function(eleventyConfig) {
         markdownIt(markdownItOptions)
             .use(markdownItAnchor, markdownItAnchorOptions)
             .use(markdownItReplaceLink)
+            .use(codeClipboard.markdownItCopyButton)
     );
 
     eleventyConfig.addPassthroughCopy({"docs/static": "static"});
NEWS
@@ -4,6 +4,10 @@
 * #576: Add support for "file://" paths within "repositories" option.
+* #618: Add support for BORG_FILES_CACHE_TTL environment variable via "borg_files_cache_ttl" option
+  in borgmatic's storage configuration.
+* #623: Fix confusing message when an error occurs running actions for a configuration file.
+* #655: Fix error when databases are configured and a source directory doesn't exist.
 * Add code style plugins to enforce use of Python f-strings and prevent single-letter variables.
   To join in the pedantry, refresh your test environment with "tox --recreate".
 
 1.7.9
 * #295: Add a SQLite database dump/restore hook.
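As a sketch of how the new "borg_files_cache_ttl" option from #618 might be wired up (the option name comes from the NEWS entry above; the exact mapping onto Borg's BORG_FILES_CACHE_TTL environment variable is an assumption, not shown in this diff):

# Hypothetical sketch: map the storage option onto Borg's environment variable.
import os

def make_environment(storage_config):
    environment = dict(os.environ)
    files_cache_ttl = storage_config.get('borg_files_cache_ttl')  # assumed option name
    if files_cache_ttl is not None:
        environment['BORG_FILES_CACHE_TTL'] = str(files_cache_ttl)
    return environment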
@@ -16,7 +16,7 @@ def run_borg(
     if borg_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, borg_arguments.repository
     ):
-        logger.info('{}: Running arbitrary Borg command'.format(repository['path']))
+        logger.info(f'{repository["path"]}: Running arbitrary Borg command')
         archive_name = borgmatic.borg.rlist.resolve_archive_name(
             repository['path'],
             borg_arguments.archive,

@@ -37,7 +37,7 @@ def run_check(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Running consistency checks'.format(repository['path']))
+    logger.info(f'{repository["path"]}: Running consistency checks')
    borgmatic.borg.check.check_archives(
         repository['path'],
         location,

@@ -39,7 +39,7 @@ def run_compact(
         **hook_context,
     )
     if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version):
-        logger.info('{}: Compacting segments{}'.format(repository['path'], dry_run_label))
+        logger.info(f'{repository["path"]}: Compacting segments{dry_run_label}')
         borgmatic.borg.compact.compact_segments(
             global_arguments.dry_run,
             repository['path'],
@@ -52,9 +52,9 @@ def run_compact(
             threshold=compact_arguments.threshold,
         )
     else:  # pragma: nocover
-        logger.info(
-            '{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository['path'])
-        )
+        logger.info(
+            f'{repository["path"]}: Skipping compact (only available/needed in Borg 1.2+)'
+        )
     borgmatic.hooks.command.execute_hook(
         hooks.get('after_compact'),
         hooks.get('umask'),
@@ -42,7 +42,7 @@ def run_create(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Creating archive{}'.format(repository['path'], dry_run_label))
+    logger.info(f'{repository["path"]}: Creating archive{dry_run_label}')
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
         hooks,

@@ -22,11 +22,7 @@ def run_export_tar(
     if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, export_tar_arguments.repository
     ):
-        logger.info(
-            '{}: Exporting archive {} as tar file'.format(
-                repository['path'], export_tar_arguments.archive
-            )
-        )
+        logger.info(f'{repository["path"]}: Exporting archive {export_tar_arguments.archive} as tar file')
         borgmatic.borg.export_tar.export_tar_archive(
             global_arguments.dry_run,
             repository['path'],

@@ -35,9 +35,7 @@ def run_extract(
     if extract_arguments.repository is None or borgmatic.config.validate.repositories_match(
         repository, extract_arguments.repository
     ):
-        logger.info(
-            '{}: Extracting archive {}'.format(repository['path'], extract_arguments.archive)
-        )
+        logger.info(f'{repository["path"]}: Extracting archive {extract_arguments.archive}')
         borgmatic.borg.extract.extract_archive(
             global_arguments.dry_run,
             repository['path'],

@@ -17,11 +17,9 @@ def run_mount(
         repository, mount_arguments.repository
     ):
         if mount_arguments.archive:
-            logger.info(
-                '{}: Mounting archive {}'.format(repository['path'], mount_arguments.archive)
-            )
+            logger.info(f'{repository["path"]}: Mounting archive {mount_arguments.archive}')
         else:  # pragma: nocover
-            logger.info('{}: Mounting repository'.format(repository['path']))
+            logger.info(f'{repository["path"]}: Mounting repository')
 
         borgmatic.borg.mount.mount_archive(
             repository['path'],

@@ -37,7 +37,7 @@ def run_prune(
         global_arguments.dry_run,
         **hook_context,
     )
-    logger.info('{}: Pruning archives{}'.format(repository['path'], dry_run_label))
+    logger.info(f'{repository["path"]}: Pruning archives{dry_run_label}')
     borgmatic.borg.prune.prune_archives(
         global_arguments.dry_run,
         repository['path'],

@@ -23,7 +23,7 @@ def run_rcreate(
     ):
         return
 
-    logger.info('{}: Creating repository'.format(repository['path']))
+    logger.info(f'{repository["path"]}: Creating repository')
     borgmatic.borg.rcreate.create_repository(
         global_arguments.dry_run,
         repository['path'],

@@ -255,11 +255,8 @@ def run_restore(
     ):
         return
 
-    logger.info(
-        '{}: Restoring databases from archive {}'.format(
-            repository['path'], restore_arguments.archive
-        )
-    )
+    logger.info(f'{repository["path"]}: Restoring databases from archive {restore_arguments.archive}')
 
     borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
         'remove_database_dumps',
         hooks,

@@ -19,9 +19,8 @@ def run_rinfo(
         repository, rinfo_arguments.repository
     ):
         if not rinfo_arguments.json:  # pragma: nocover
-            logger.answer(
-                '{}: Displaying repository summary information'.format(repository['path'])
-            )
+            logger.answer(f'{repository["path"]}: Displaying repository summary information')
 
         json_output = borgmatic.borg.rinfo.display_repository_info(
             repository['path'],
             storage,

@@ -19,7 +19,8 @@ def run_rlist(
         repository, rlist_arguments.repository
     ):
         if not rlist_arguments.json:  # pragma: nocover
-            logger.answer('{}: Listing repository'.format(repository['path']))
+            logger.answer(f'{repository["path"]}: Listing repository')
 
         json_output = borgmatic.borg.rlist.list_repository(
             repository['path'],
             storage,
@@ -12,7 +12,7 @@ DEFAULT_CHECKS = (
     {'name': 'repository', 'frequency': '1 month'},
     {'name': 'archives', 'frequency': '1 month'},
 )
-DEFAULT_PREFIX = '{hostname}-'
+DEFAULT_PREFIX = '{hostname}-'  # noqa: FS003
 
 
 logger = logging.getLogger(__name__)

@@ -196,7 +196,7 @@ def make_check_flags(local_borg_version, checks, check_last=None, prefix=None):
         return common_flags
 
     return (
-        tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
+        tuple(f'--{check}-only' for check in checks if check in ('repository', 'archives'))
         + common_flags
     )
 

@@ -217,7 +217,7 @@ def make_list_filter_flags(local_borg_version, dry_run):
     return f'{base_flags}-'
 
 
-DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
+DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'  # noqa: FS003
 
 
 def collect_borgmatic_source_directories(borgmatic_source_directory):

@@ -56,7 +56,7 @@ def export_tar_archive(
         output_log_level = logging.INFO
 
     if dry_run:
-        logging.info('{}: Skipping export to tar file (dry run)'.format(repository))
+        logging.info(f'{repository}: Skipping export to tar file (dry run)')
         return
 
     execute_command(

@@ -10,7 +10,7 @@ def make_flags(name, value):
     if not value:
         return ()
 
-    flag = '--{}'.format(name.replace('_', '-'))
+    flag = f"--{name.replace('_', '-')}"
 
     if value is True:
         return (flag,)

@@ -113,7 +113,7 @@ def capture_archive_listing(
             paths=[f'sh:{list_path}'],
             find_paths=None,
             json=None,
-            format='{path}{NL}',
+            format='{path}{NL}',  # noqa: FS003
         ),
         local_path,
         remote_path,

@@ -24,7 +24,7 @@ def make_prune_flags(retention_config, local_borg_version):
     )
     '''
     config = retention_config.copy()
-    prefix = config.pop('prefix', '{hostname}-')
+    prefix = config.pop('prefix', '{hostname}-')  # noqa: FS003
 
     if prefix:
         if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version):

@@ -42,7 +42,7 @@ def resolve_archive_name(
     except IndexError:
         raise ValueError('No archives found in the repository')
 
-    logger.debug('{}: Latest archive is {}'.format(repository, latest_archive))
+    logger.debug(f'{repository}: Latest archive is {latest_archive}')
 
     return latest_archive
 

@@ -117,7 +117,7 @@ def list_repository(
     )
 
     if rlist_arguments.json:
-        return execute_command_and_capture_output(main_command, extra_environment=borg_environment,)
+        return execute_command_and_capture_output(main_command, extra_environment=borg_environment)
     else:
         execute_command(
             main_command,

@@ -131,9 +131,7 @@ def make_parsers():
         nargs='*',
         dest='config_paths',
         default=config_paths,
-        help='Configuration filenames or directories, defaults to: {}'.format(
-            ' '.join(unexpanded_config_paths)
-        ),
+        help=f"Configuration filenames or directories, defaults to: {' '.join(unexpanded_config_paths)}",
     )
     global_group.add_argument(
         '--excludes',
@@ -70,9 +70,7 @@ def run_configuration(config_filename, config, arguments):
     try:
         local_borg_version = borg_version.local_borg_version(storage, local_path)
     except (OSError, CalledProcessError, ValueError) as error:
-        yield from log_error_records(
-            '{}: Error getting local Borg version'.format(config_filename), error
-        )
+        yield from log_error_records(f'{config_filename}: Error getting local Borg version', error)
         return
 
     try:

@@ -100,7 +98,7 @@ def run_configuration(config_filename, config, arguments):
                 return
 
             encountered_error = error
-            yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+            yield from log_error_records(f'{config_filename}: Error pinging monitor', error)
 
     if not encountered_error:
         repo_queue = Queue()

@@ -134,7 +132,7 @@ def run_configuration(config_filename, config, arguments):
                     repo_queue.put((repository, retry_num + 1),)
                     tuple(  # Consume the generator so as to trigger logging.
                         log_error_records(
-                            '{}: Error running actions for repository'.format(repository['path']),
+                            f'{repository["path"]}: Error running actions for repository',
                             error,
                             levelno=logging.WARNING,
                             log_command_error_output=True,

@@ -149,7 +147,7 @@ def run_configuration(config_filename, config, arguments):
                     return
 
                 yield from log_error_records(
-                    '{}: Error running actions for repository'.format(repository['path']), error
+                    f'{repository["path"]}: Error running actions for repository', error
                 )
                 encountered_error = error
                 error_repository = repository['path']

@@ -171,7 +169,7 @@ def run_configuration(config_filename, config, arguments):
                 return
 
             encountered_error = error
-            yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+            yield from log_error_records(f'{repository_path}: Error pinging monitor', error)
 
     if not encountered_error:
         try:

@@ -198,7 +196,7 @@ def run_configuration(config_filename, config, arguments):
                 return
 
             encountered_error = error
-            yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error)
+            yield from log_error_records(f'{config_filename}: Error pinging monitor', error)
 
     if encountered_error and using_primary_action:
         try:

@@ -233,9 +231,7 @@ def run_configuration(config_filename, config, arguments):
             if command.considered_soft_failure(config_filename, error):
                 return
 
-            yield from log_error_records(
-                '{}: Error running on-error hook'.format(config_filename), error
-            )
+            yield from log_error_records(f'{config_filename}: Error running on-error hook', error)
 
 
 def run_actions(

@@ -476,9 +472,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
                     dict(
                         levelno=logging.WARNING,
                         levelname='WARNING',
-                        msg='{}: Insufficient permissions to read configuration file'.format(
-                            config_filename
-                        ),
+                        msg=f'{config_filename}: Insufficient permissions to read configuration file',
                     )
                 ),
             ]

@@ -490,7 +484,7 @@ def load_configurations(config_filenames, overrides=None, resolve_env=True):
                     dict(
                         levelno=logging.CRITICAL,
                         levelname='CRITICAL',
-                        msg='{}: Error parsing configuration file'.format(config_filename),
+                        msg=f'{config_filename}: Error parsing configuration file',
                     )
                 ),
                 logging.makeLogRecord(
|
@ -591,9 +585,7 @@ def collect_configuration_run_summary_logs(configs, arguments):
|
|||
|
||||
if not configs:
|
||||
yield from log_error_records(
|
||||
'{}: No valid configuration files found'.format(
|
||||
' '.join(arguments['global'].config_paths)
|
||||
)
|
||||
r"{' '.join(arguments['global'].config_paths)}: No valid configuration files found",
|
||||
)
|
||||
return
|
||||
|
||||
|
@@ -619,23 +611,21 @@ def collect_configuration_run_summary_logs(configs, arguments):
         error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord))
 
         if error_logs:
-            yield from log_error_records(
-                '{}: Error running configuration file'.format(config_filename)
-            )
+            yield from log_error_records(f'{config_filename}: An error occurred')
             yield from error_logs
         else:
             yield logging.makeLogRecord(
                 dict(
                     levelno=logging.INFO,
                     levelname='INFO',
-                    msg='{}: Successfully ran configuration file'.format(config_filename),
+                    msg=f'{config_filename}: Successfully ran configuration file',
                 )
             )
             if results:
                 json_results.extend(results)
 
     if 'umount' in arguments:
-        logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point))
+        logger.info(f"Unmounting mount point {arguments['umount'].mount_point}")
         try:
             borg_umount.unmount_archive(
                 mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs),

@@ -683,7 +673,7 @@ def main():  # pragma: no cover
         if error.code == 0:
             raise error
         configure_logging(logging.CRITICAL)
-        logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
+        logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}")
         exit_with_help_link()
 
     global_arguments = arguments['global']

@@ -716,7 +706,7 @@ def main():  # pragma: no cover
         )
     except (FileNotFoundError, PermissionError) as error:
         configure_logging(logging.CRITICAL)
-        logger.critical('Error configuring logging: {}'.format(error))
+        logger.critical(f'Error configuring logging: {error}')
         exit_with_help_link()
 
     logger.debug('Ensuring legacy configuration is upgraded')
@@ -34,7 +34,7 @@ def bash_completion():
         ' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"',
         ' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"',
         ' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];'
-        ' then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE,
+        f' then cat << EOF\n{UPGRADE_MESSAGE}\nEOF',
         ' fi',
         '}',
         'complete_borgmatic() {',

@@ -48,7 +48,7 @@ def bash_completion():
         for action, subparser in subparsers.choices.items()
     )
     + (
-        ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'
+        ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))'  # noqa: FS003
         % (actions, global_flags),
         ' (check_version &)',
         '}',

@@ -28,9 +28,7 @@ def parse_arguments(*arguments):
         '--source-config',
         dest='source_config_filename',
         default=DEFAULT_SOURCE_CONFIG_FILENAME,
-        help='Source INI-style configuration filename. Default: {}'.format(
-            DEFAULT_SOURCE_CONFIG_FILENAME
-        ),
+        help=f'Source INI-style configuration filename. Default: {DEFAULT_SOURCE_CONFIG_FILENAME}',
     )
     parser.add_argument(
         '-e',

@@ -46,9 +44,7 @@ def parse_arguments(*arguments):
         '--destination-config',
         dest='destination_config_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration filename. Default: {}'.format(
-            DEFAULT_DESTINATION_CONFIG_FILENAME
-        ),
+        help=f'Destination YAML configuration filename. Default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
     )
 
     return parser.parse_args(arguments)

@@ -59,19 +55,15 @@ TEXT_WRAP_CHARACTERS = 80
 
 def display_result(args):  # pragma: no cover
     result_lines = textwrap.wrap(
-        'Your borgmatic configuration has been upgraded. Please review the result in {}.'.format(
-            args.destination_config_filename
-        ),
+        f'Your borgmatic configuration has been upgraded. Please review the result in {args.destination_config_filename}.',
         TEXT_WRAP_CHARACTERS,
     )
 
+    excludes_phrase = (
+        f' and {args.source_excludes_filename}' if args.source_excludes_filename else ''
+    )
     delete_lines = textwrap.wrap(
-        'Once you are satisfied, you can safely delete {}{}.'.format(
-            args.source_config_filename,
-            ' and {}'.format(args.source_excludes_filename)
-            if args.source_excludes_filename
-            else '',
-        ),
+        f'Once you are satisfied, you can safely delete {args.source_config_filename}{excludes_phrase}.',
         TEXT_WRAP_CHARACTERS,
     )
 

@@ -23,9 +23,7 @@ def parse_arguments(*arguments):
         '--destination',
         dest='destination_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration file, default: {}'.format(
-            DEFAULT_DESTINATION_CONFIG_FILENAME
-        ),
+        help=f'Destination YAML configuration file, default: {DEFAULT_DESTINATION_CONFIG_FILENAME}',
     )
     parser.add_argument(
         '--overwrite',

@@ -48,17 +46,13 @@ def main():  # pragma: no cover
         overwrite=args.overwrite,
     )
 
-    print('Generated a sample configuration file at {}.'.format(args.destination_filename))
+    print(f'Generated a sample configuration file at {args.destination_filename}.')
     print()
     if args.source_filename:
-        print(
-            'Merged in the contents of configuration file at {}.'.format(args.source_filename)
-        )
+        print(f'Merged in the contents of configuration file at {args.source_filename}.')
         print('To review the changes made, run:')
         print()
-        print(
-            ' diff --unified {} {}'.format(args.source_filename, args.destination_filename)
-        )
+        print(f' diff --unified {args.source_filename} {args.destination_filename}')
         print()
     print('This includes all available configuration options with example values. The few')
     print('required options are indicated. Please edit the file to suit your needs.')

@@ -21,9 +21,7 @@ def parse_arguments(*arguments):
         nargs='+',
         dest='config_paths',
         default=config_paths,
-        help='Configuration filenames or directories, defaults to: {}'.format(
-            ' '.join(config_paths)
-        ),
+        help=f'Configuration filenames or directories, defaults to: {config_paths}',
     )
 
     return parser.parse_args(arguments)

@@ -44,13 +42,11 @@ def main():  # pragma: no cover
         try:
             validate.parse_configuration(config_filename, validate.schema_filename())
         except (ValueError, OSError, validate.Validation_error) as error:
-            logging.critical('{}: Error parsing configuration file'.format(config_filename))
+            logging.critical(f'{config_filename}: Error parsing configuration file')
             logging.critical(error)
             found_issues = True
 
     if found_issues:
         sys.exit(1)
     else:
-        logger.info(
-            'All given configuration files are valid: {}'.format(', '.join(config_filenames))
-        )
+        logger.info(f"All given configuration files are valid: {', '.join(config_filenames)}")
@@ -16,8 +16,8 @@ def get_default_config_paths(expand_home=True):
     return [
         '/etc/borgmatic/config.yaml',
         '/etc/borgmatic.d',
-        '%s/borgmatic/config.yaml' % user_config_directory,
-        '%s/borgmatic.d' % user_config_directory,
+        os.path.join(user_config_directory, 'borgmatic/config.yaml'),
+        os.path.join(user_config_directory, 'borgmatic.d'),
     ]
 
 

@@ -14,11 +14,14 @@ def _resolve_string(matcher):
     if matcher.group('escape') is not None:
         # in case of escaped envvar, unescape it
         return matcher.group('variable')
 
     # resolve the env var
     name, default = matcher.group('name'), matcher.group('default')
     out = os.getenv(name, default=default)
 
     if out is None:
-        raise ValueError('Cannot find variable ${name} in environment'.format(name=name))
+        raise ValueError(f'Cannot find variable {name} in environment')
 
     return out
 
 

@@ -48,7 +48,7 @@ def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False):
             config, schema, indent=indent, skip_first=parent_is_sequence
         )
     else:
-        raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema))
+        raise ValueError(f'Schema at level {level} is unsupported: {schema}')
 
     return config
 

@@ -84,7 +84,7 @@ def _comment_out_optional_configuration(rendered_config):
     for line in rendered_config.split('\n'):
         # Upon encountering an optional configuration option, comment out lines until the next blank
         # line.
-        if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)):
+        if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'):
             optional = True
             continue
 

@@ -117,9 +117,7 @@ def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=
     '''
     if not overwrite and os.path.exists(config_filename):
         raise FileExistsError(
-            '{} already exists. Aborting. Use --overwrite to replace the file.'.format(
-                config_filename
-            )
+            f'{config_filename} already exists. Aborting. Use --overwrite to replace the file.'
        )
 
     try:

@@ -218,7 +216,7 @@ def remove_commented_out_sentinel(config, field_name):
     except KeyError:
         return
 
-    if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL):
+    if last_comment_value == f'# {COMMENTED_OUT_SENTINEL}\n':
         config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop()
 
 

@@ -70,13 +70,11 @@ def validate_configuration_format(parser, config_format):
         section_format.name for section_format in config_format
     )
     if unknown_section_names:
-        raise ValueError(
-            'Unknown config sections found: {}'.format(', '.join(unknown_section_names))
-        )
+        raise ValueError(f"Unknown config sections found: {', '.join(unknown_section_names)}")
 
     missing_section_names = set(required_section_names) - section_names
     if missing_section_names:
-        raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names)))
+        raise ValueError(f"Missing config sections: {', '.join(missing_section_names)}")
 
     for section_format in config_format:
         if section_format.name not in section_names:

@@ -91,9 +89,7 @@ def validate_configuration_format(parser, config_format):
 
         if unexpected_option_names:
             raise ValueError(
-                'Unexpected options found in config section {}: {}'.format(
-                    section_format.name, ', '.join(sorted(unexpected_option_names))
-                )
+                f"Unexpected options found in config section {section_format.name}: {', '.join(sorted(unexpected_option_names))}",
             )
 
         missing_option_names = tuple(

@@ -105,9 +101,7 @@ def validate_configuration_format(parser, config_format):
 
         if missing_option_names:
             raise ValueError(
-                'Required options missing from config section {}: {}'.format(
-                    section_format.name, ', '.join(missing_option_names)
-                )
+                f"Required options missing from config section {section_format.name}: {', '.join(missing_option_names)}",
             )
 
 

@@ -137,7 +131,7 @@ def parse_configuration(config_filename, config_format):
     '''
     parser = RawConfigParser()
     if not parser.read(config_filename):
-        raise ValueError('Configuration file cannot be opened: {}'.format(config_filename))
+        raise ValueError(f'Configuration file cannot be opened: {config_filename}')
 
     validate_configuration_format(parser, config_format)
 
@@ -20,9 +20,9 @@ def format_json_error_path_element(path_element):
     Given a path element into a JSON data structure, format it for display as a string.
     '''
     if isinstance(path_element, int):
-        return str('[{}]'.format(path_element))
+        return str(f'[{path_element}]')
 
-    return str('.{}'.format(path_element))
+    return str(f'.{path_element}')
 
 
 def format_json_error(error):

@@ -30,10 +30,10 @@ def format_json_error(error):
     Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string.
     '''
     if not error.path:
-        return 'At the top level: {}'.format(error.message)
+        return f'At the top level: {error.message}'
 
     formatted_path = ''.join(format_json_error_path_element(element) for element in error.path)
-    return "At '{}': {}".format(formatted_path.lstrip('.'), error.message)
+    return f"At '{formatted_path.lstrip('.')}': {error.message}"
 
 
 class Validation_error(ValueError):

@@ -54,9 +54,10 @@ class Validation_error(ValueError):
         '''
         Render a validation error as a user-facing string.
         '''
-        return 'An error occurred while parsing a configuration file at {}:\n'.format(
-            self.config_filename
-        ) + '\n'.join(error for error in self.errors)
+        return (
+            f'An error occurred while parsing a configuration file at {self.config_filename}:\n'
+            + '\n'.join(error for error in self.errors)
+        )
 
 
 def apply_logical_validation(config_filename, parsed_configuration):

@@ -72,9 +73,7 @@ def apply_logical_validation(config_filename, parsed_configuration):
                 raise Validation_error(
                     config_filename,
                     (
-                        'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format(
-                            repository
-                        ),
+                        f'Unknown repository in the "consistency" section\'s "check_repositories": {repository}',
                     ),
                 )
 

@@ -173,9 +172,9 @@ def guard_configuration_contains_repository(repository, configurations):
     )
 
     if count == 0:
-        raise ValueError('Repository {} not found in configuration files'.format(repository))
+        raise ValueError(f'Repository {repository} not found in configuration files')
     if count > 1:
-        raise ValueError('Repository {} found in multiple configuration files'.format(repository))
+        raise ValueError(f'Repository {repository} found in multiple configuration files')
 
 
 def guard_single_repository_selected(repository, configurations):
@@ -11,7 +11,7 @@ ERROR_OUTPUT_MAX_LINE_COUNT = 25
 BORG_ERROR_EXIT_CODE = 2
 
 
-def exit_code_indicates_error(process, exit_code, borg_local_path=None):
+def exit_code_indicates_error(command, exit_code, borg_local_path=None):
     '''
     Return True if the given exit code from running a command corresponds to an error. If a Borg
     local path is given and matches the process' command, then treat exit code 1 as a warning

@@ -20,8 +20,6 @@ def exit_code_indicates_error(process, exit_code, borg_local_path=None):
     if exit_code is None:
         return False
 
-    command = process.args.split(' ') if isinstance(process.args, str) else process.args
-
     if borg_local_path and command[0] == borg_local_path:
         return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)
 

@@ -121,8 +119,9 @@ def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path):
             if exit_code is None:
                 still_running = True
 
+            command = process.args.split(' ') if isinstance(process.args, str) else process.args
             # If any process errors, then raise accordingly.
-            if exit_code_indicates_error(process, exit_code, borg_local_path):
+            if exit_code_indicates_error(command, exit_code, borg_local_path):
                 # If an error occurs, include its output in the raised exception so that we don't
                 # inadvertently hide error output.
                 output_buffer = output_buffer_for_process(process, exclude_stdouts)

@@ -155,8 +154,8 @@ def log_command(full_command, input_file=None, output_file=None):
     '''
     logger.debug(
         ' '.join(full_command)
-        + (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '')
-        + (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '')
+        + (f" < {getattr(input_file, 'name', '')}" if input_file else '')
+        + (f" > {getattr(output_file, 'name', '')}" if output_file else '')
     )
 
 

@@ -228,6 +227,7 @@ def execute_command_and_capture_output(
     environment = {**os.environ, **extra_environment} if extra_environment else None
+    command = ' '.join(full_command) if shell else full_command
 
     try:
         output = subprocess.check_output(
             command,
             stderr=subprocess.STDOUT if capture_stderr else None,

@@ -235,6 +235,12 @@ def execute_command_and_capture_output(
             env=environment,
             cwd=working_directory,
         )
+        logger.warning(f'Command output: {output}')
+    except subprocess.CalledProcessError as error:
+        if exit_code_indicates_error(command, error.returncode):
+            raise
+        output = error.output
+        logger.warning(f'Command output: {output}')
 
     return output.decode() if output is not None else None
 
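The execute.py hunks above refactor exit_code_indicates_error() to take a command list rather than a process object, so execute_command_and_capture_output() can call it from an exception handler where no process is available. A minimal sketch of the resulting helper, assembled from the changed lines (the final fallback return and any body not shown in the hunks are assumptions):

# Sketch of the refactored helper; see the hunks above for the actual change.
BORG_ERROR_EXIT_CODE = 2

def exit_code_indicates_error(command, exit_code, borg_local_path=None):
    # A None exit code means the process is still running, so no error yet.
    if exit_code is None:
        return False

    # When the command is Borg itself, exit code 1 is only a warning, so
    # treat only 2+ (or termination by signal, i.e. a negative code) as errors.
    if borg_local_path and command[0] == borg_local_path:
        return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE)

    return bool(exit_code != 0)  # assumed fallback, not shown in the diff

# Callers now derive the command list from the process themselves, e.g.:
# command = process.args.split(' ') if isinstance(process.args, str) else process.args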
@@ -16,7 +16,7 @@ def interpolate_context(config_filename, hook_description, command, context):
     names/values, interpolate the values by "{name}" into the command and return the result.
     '''
     for name, value in context.items():
-        command = command.replace('{%s}' % name, str(value))
+        command = command.replace(f'{{{name}}}', str(value))
 
     for unsupported_variable in re.findall(r'{\w+}', command):
         logger.warning(

@@ -38,7 +38,7 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **context):
     Raise subprocesses.CalledProcessError if an error occurs in a hook.
     '''
     if not commands:
-        logger.debug('{}: No commands to run for {} hook'.format(config_filename, description))
+        logger.debug(f'{config_filename}: No commands to run for {description} hook')
         return
 
     dry_run_label = ' (dry run; not actually running hooks)' if dry_run else ''

@@ -49,19 +49,15 @@ def execute_hook(commands, umask, config_filename, description, dry_run, **context):
     ]
 
     if len(commands) == 1:
-        logger.info(
-            '{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label)
-        )
+        logger.info(f'{config_filename}: Running command for {description} hook{dry_run_label}')
     else:
         logger.info(
-            '{}: Running {} commands for {} hook{}'.format(
-                config_filename, len(commands), description, dry_run_label
-            )
+            f'{config_filename}: Running {len(commands)} commands for {description} hook{dry_run_label}',
         )
 
     if umask:
         parsed_umask = int(str(umask), 8)
-        logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask)))
+        logger.debug(f'{config_filename}: Set hook umask to {oct(parsed_umask)}')
         original_umask = os.umask(parsed_umask)
     else:
         original_umask = None

@@ -93,9 +89,7 @@ def considered_soft_failure(config_filename, error):
 
     if exit_code == SOFT_FAIL_EXIT_CODE:
         logger.info(
-            '{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format(
-                config_filename, SOFT_FAIL_EXIT_CODE
-            )
+            f'{config_filename}: Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining actions',
        )
         return True
 
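One subtlety in the interpolate_context() hunk above: in an f-string, doubled braces are literal, so f'{{{name}}}' produces the literal placeholder '{name}', exactly matching the old '{%s}' % name form. A quick self-contained check:

# Both forms build the same '{repository}' placeholder string.
name = 'repository'
assert f'{{{name}}}' == '{' + name + '}' == '{repository}'
assert ('{%s}' % name) == f'{{{name}}}'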
@@ -34,17 +34,15 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
         return
 
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
-    formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state])
+    formatted_state = f'/{MONITOR_STATE_TO_CRONHUB[state]}/'
     ping_url = (
         hook_config['ping_url']
         .replace('/start/', formatted_state)
         .replace('/ping/', formatted_state)
     )
 
-    logger.info(
-        '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label)
-    )
-    logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url))
+    logger.info(f'{config_filename}: Pinging Cronhub {state.name.lower()}{dry_run_label}')
+    logger.debug(f'{config_filename}: Using Cronhub ping URL {ping_url}')
 
     if not dry_run:
         logging.getLogger('urllib3').setLevel(logging.ERROR)

@@ -34,12 +34,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
         return
 
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
-    ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state])
+    ping_url = f"{hook_config['ping_url']}/{MONITOR_STATE_TO_CRONITOR[state]}"
 
-    logger.info(
-        '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label)
-    )
-    logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url))
+    logger.info(f'{config_filename}: Pinging Cronitor {state.name.lower()}{dry_run_label}')
+    logger.debug(f'{config_filename}: Using Cronitor ping URL {ping_url}')
 
     if not dry_run:
         logging.getLogger('urllib3').setLevel(logging.ERROR)

@@ -43,9 +43,9 @@ def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs):
     try:
         module = HOOK_NAME_TO_MODULE[hook_name]
     except KeyError:
-        raise ValueError('Unknown hook name: {}'.format(hook_name))
+        raise ValueError(f'Unknown hook name: {hook_name}')
 
-    logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name))
+    logger.debug(f'{log_prefix}: Calling {hook_name} hook function {function_name}')
     return getattr(module, function_name)(config, log_prefix, *args, **kwargs)
 
 

@@ -33,7 +33,7 @@ def make_database_dump_filename(dump_path, name, hostname=None):
     Raise ValueError if the database name is invalid.
     '''
     if os.path.sep in name:
-        raise ValueError('Invalid database name {}'.format(name))
+        raise ValueError(f'Invalid database name {name}')
 
     return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name)
 

@@ -60,9 +60,7 @@ def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run):
     '''
     dry_run_label = ' (dry run; not actually removing anything)' if dry_run else ''
 
-    logger.debug(
-        '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label)
-    )
+    logger.debug(f'{log_prefix}: Removing {database_type_name} database dumps{dry_run_label}')
 
     expanded_path = os.path.expanduser(dump_path)
 

@@ -78,4 +76,4 @@ def convert_glob_patterns_to_borg_patterns(patterns):
     Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive
     patterns like "sh:etc/*".
     '''
-    return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns]
+    return [f'sh:{pattern.lstrip(os.path.sep)}' for pattern in patterns]

@@ -99,7 +99,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
     ping_url = (
         hook_config['ping_url']
         if hook_config['ping_url'].startswith('http')
-        else 'https://hc-ping.com/{}'.format(hook_config['ping_url'])
+        else f"https://hc-ping.com/{hook_config['ping_url']}"
     )
     dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
 

@@ -111,12 +111,10 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
 
     healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state)
     if healthchecks_state:
-        ping_url = '{}/{}'.format(ping_url, healthchecks_state)
+        ping_url = f'{ping_url}/{healthchecks_state}'
 
-    logger.info(
-        '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label)
-    )
-    logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url))
+    logger.info(f'{config_filename}: Pinging Healthchecks {state.name.lower()}{dry_run_label}')
+    logger.debug(f'{config_filename}: Using Healthchecks ping URL {ping_url}')
 
     if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG):
         payload = format_buffered_logs_for_payload()

@@ -27,7 +27,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     '''
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
 
-    logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label))
+    logger.info(f'{log_prefix}: Dumping MongoDB databases{dry_run_label}')
 
     processes = []
     for database in databases:

@@ -38,9 +38,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
         dump_format = database.get('format', 'archive')
 
         logger.debug(
-            '{}: Dumping MongoDB database {} to {}{}'.format(
-                log_prefix, name, dump_filename, dry_run_label
-            )
+            f'{log_prefix}: Dumping MongoDB database {name} to {dump_filename}{dry_run_label}',
         )
         if dry_run:
             continue

@@ -126,9 +124,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     )
     restore_command = build_restore_command(extract_process, database, dump_filename)
 
-    logger.debug(
-        '{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label)
-    )
+    logger.debug(f"{log_prefix}: Restoring MongoDB database {database['name']}{dry_run_label}")
     if dry_run:
         return
 

@@ -119,7 +119,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label))
+    logger.info(f'{log_prefix}: Dumping MySQL databases{dry_run_label}')
 
     for database in databases:
         dump_path = make_dump_path(location_config)

@@ -209,9 +209,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     )
     extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None
 
-    logger.debug(
-        '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
-    )
+    logger.debug(f"{log_prefix}: Restoring MySQL database {database['name']}{dry_run_label}")
     if dry_run:
         return
 

@@ -29,14 +29,12 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
     '''
     if state != monitor.State.FAIL:
         logger.debug(
-            '{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format(
-                config_filename, state.name.lower()
-            )
+            f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in PagerDuty hook',
        )
         return
 
     dry_run_label = ' (dry run; not actually sending)' if dry_run else ''
-    logger.info('{}: Sending failure event to PagerDuty {}'.format(config_filename, dry_run_label))
+    logger.info(f'{config_filename}: Sending failure event to PagerDuty {dry_run_label}')
 
     if dry_run:
         return

@@ -50,7 +48,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
             'routing_key': hook_config['integration_key'],
             'event_action': 'trigger',
             'payload': {
-                'summary': 'backup failed on {}'.format(hostname),
+                'summary': f'backup failed on {hostname}',
                 'severity': 'error',
                 'source': hostname,
                 'timestamp': local_timestamp,

@@ -65,7 +63,7 @@ def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_
             },
         }
     )
-    logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload))
+    logger.debug(f'{config_filename}: Using PagerDuty payload: {payload}')
 
     logging.getLogger('urllib3').setLevel(logging.ERROR)
     try:

@@ -93,7 +93,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label))
+    logger.info(f'{log_prefix}: Dumping PostgreSQL databases{dry_run_label}')
 
     for database in databases:
         extra_environment = make_extra_environment(database)

@@ -228,9 +228,7 @@ def restore_database_dump(database_config, log_prefix, location_config, dry_run,
     )
     extra_environment = make_extra_environment(database)
 
-    logger.debug(
-        '{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label)
-    )
+    logger.debug(f"{log_prefix}: Restoring PostgreSQL database {database['name']}{dry_run_label}")
     if dry_run:
         return
 

@@ -26,7 +26,7 @@ def dump_databases(databases, log_prefix, location_config, dry_run):
     dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else ''
     processes = []
 
-    logger.info('{}: Dumping SQLite databases{}'.format(log_prefix, dry_run_label))
+    logger.info(f'{log_prefix}: Dumping SQLite databases{dry_run_label}')
 
     for database in databases:
         database_path = database['path']

@@ -108,7 +108,7 @@ def color_text(color, message):
     if not color:
         return message
 
-    return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL)
+    return f'{color}{message}{colorama.Style.RESET_ALL}'
 
 
 def add_logging_level(level_name, level_number):
@@ -18,6 +18,7 @@ RUN npm install @11ty/eleventy \
     @11ty/eleventy-plugin-syntaxhighlight \
     @11ty/eleventy-plugin-inclusive-language \
     @11ty/eleventy-navigation \
+    eleventy-plugin-code-clipboard \
     markdown-it \
     markdown-it-anchor \
     markdown-it-replace-link

@@ -533,3 +533,18 @@ main .elv-toc + h1 .direct-link {
 .header-anchor:hover::after {
     content: " 🔗";
 }
+
+.mdi {
+    display: inline-block;
+    width: 1em;
+    height: 1em;
+    background-color: currentColor;
+    -webkit-mask: no-repeat center / 100%;
+    mask: no-repeat center / 100%;
+    -webkit-mask-image: var(--svg);
+    mask-image: var(--svg);
+}
+
+.mdi.mdi-content-copy {
+    --svg: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='black' d='M19 21H8V7h11m0-2H8a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h11a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2m-3-4H4a2 2 0 0 0-2 2v14h2V3h12V1Z'/%3E%3C/svg%3E");
+}

@@ -22,6 +22,6 @@
 <body>
 
     {{ content | safe }}
 
+    {% initClipboardJS %}
 </body>
 </html>

@@ -6,6 +6,8 @@ colorama==0.4.4
 coverage==5.3
 flake8==4.0.1
 flake8-quotes==3.3.2
+flake8-use-fstring==1.4
+flake8-variables-names==0.0.5
 flexmock==0.10.4
 isort==5.9.1
 mccabe==0.6.1
@@ -12,9 +12,7 @@ def generate_configuration(config_path, repository_path):
     to work for testing (including injecting the given repository path and tacking on an encryption
     passphrase).
     '''
-    subprocess.check_call(
-        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-    )
+    subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
     config = (
         open(config_path)
         .read()

@@ -46,13 +44,13 @@ def test_borgmatic_command():
     generate_configuration(config_path, repository_path)
 
     subprocess.check_call(
-        'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ')
+        f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ')
     )
 
     # Run borgmatic to generate a backup archive, and then list it to make sure it exists.
-    subprocess.check_call('borgmatic --config {}'.format(config_path).split(' '))
+    subprocess.check_call(f'borgmatic --config {config_path}'.split(' '))
     output = subprocess.check_output(
-        'borgmatic --config {} list --json'.format(config_path).split(' ')
+        f'borgmatic --config {config_path} list --json'.split(' ')
     ).decode(sys.stdout.encoding)
     parsed_output = json.loads(output)
 

@@ -63,16 +61,14 @@ def test_borgmatic_command():
     # Extract the created archive into the current (temporary) directory, and confirm that the
     # extracted file looks right.
     output = subprocess.check_output(
-        'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split(
-            ' '
-        )
+        f'borgmatic --config {config_path} extract --archive {archive_name}'.split(' '),
     ).decode(sys.stdout.encoding)
     extracted_config_path = os.path.join(extract_path, config_path)
     assert open(extracted_config_path).read() == open(config_path).read()
 
     # Exercise the info action.
     output = subprocess.check_output(
-        'borgmatic --config {} info --json'.format(config_path).split(' ')
+        f'borgmatic --config {config_path} info --json'.split(' '),
     ).decode(sys.stdout.encoding)
     parsed_output = json.loads(output)
 

@@ -189,7 +189,7 @@ def test_database_dump_with_error_causes_borgmatic_to_exit():
                 '-v',
                 '2',
                 '--override',
-                "hooks.postgresql_databases=[{'name': 'nope'}]",
+                "hooks.postgresql_databases=[{'name': 'nope'}]",  # noqa: FS003
             ]
         )
     finally:

@@ -10,17 +10,15 @@ def generate_configuration(config_path, repository_path):
     to work for testing (including injecting the given repository path and tacking on an encryption
     passphrase).
     '''
-    subprocess.check_call(
-        'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-    )
+    subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
     config = (
         open(config_path)
         .read()
         .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path)
-        .replace('- ssh://user@backupserver/./{fqdn}', '')
+        .replace('- ssh://user@backupserver/./{fqdn}', '')  # noqa: FS003
         .replace('- /var/local/backups/local.borg', '')
         .replace('- /home/user/path with spaces', '')
-        .replace('- /home', '- {}'.format(config_path))
+        .replace('- /home', f'- {config_path}')
         .replace('- /etc', '')
         .replace('- /var/log/syslog*', '')
         + 'storage:\n encryption_passphrase: "test"'

@@ -7,12 +7,8 @@ def test_validate_config_command_with_valid_configuration_succeeds():
     with tempfile.TemporaryDirectory() as temporary_directory:
         config_path = os.path.join(temporary_directory, 'test.yaml')
 
-        subprocess.check_call(
-            'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-        )
-        exit_code = subprocess.call(
-            'validate-borgmatic-config --config {}'.format(config_path).split(' ')
-        )
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
+        exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' '))
 
         assert exit_code == 0
 

@@ -21,16 +17,12 @@ def test_validate_config_command_with_invalid_configuration_fails():
     with tempfile.TemporaryDirectory() as temporary_directory:
         config_path = os.path.join(temporary_directory, 'test.yaml')
 
-        subprocess.check_call(
-            'generate-borgmatic-config --destination {}'.format(config_path).split(' ')
-        )
+        subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' '))
         config = open(config_path).read().replace('keep_daily: 7', 'keep_daily: "7"')
         config_file = open(config_path, 'w')
         config_file.write(config)
         config_file.close()
 
-        exit_code = subprocess.call(
-            'validate-borgmatic-config --config {}'.format(config_path).split(' ')
-        )
+        exit_code = subprocess.call(f'validate-borgmatic-config --config {config_path}'.split(' '))
 
         assert exit_code == 1

@@ -7,7 +7,7 @@ from borgmatic.config import legacy as module
 
 def test_parse_section_options_with_punctuation_should_return_section_options():
     parser = module.RawConfigParser()
-    parser.read_file(StringIO('[section]\nfoo: {}\n'.format(string.punctuation)))
+    parser.read_file(StringIO(f'[section]\nfoo: {string.punctuation}\n'))
 
     section_format = module.Section_format(
         'section', (module.Config_option('foo', str, required=True),)
@ -138,10 +138,10 @@ def test_log_outputs_kills_other_processes_when_one_errors():
|
|||
|
||||
process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
flexmock(module).should_receive('exit_code_indicates_error').with_args(
|
||||
process, None, 'borg'
|
||||
['grep'], None, 'borg'
|
||||
).and_return(False)
|
||||
flexmock(module).should_receive('exit_code_indicates_error').with_args(
|
||||
process, 2, 'borg'
|
||||
['grep'], 2, 'borg'
|
||||
).and_return(True)
|
||||
other_process = subprocess.Popen(
|
||||
['sleep', '2'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
|
@ -245,10 +245,10 @@ def test_log_outputs_truncates_long_error_output():
|
|||
|
||||
process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
flexmock(module).should_receive('exit_code_indicates_error').with_args(
|
||||
process, None, 'borg'
|
||||
['grep'], None, 'borg'
|
||||
).and_return(False)
|
||||
flexmock(module).should_receive('exit_code_indicates_error').with_args(
|
||||
process, 2, 'borg'
|
||||
['grep'], 2, 'borg'
|
||||
).and_return(True)
|
||||
flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout)
|
||||
|
||||
|
|
|
@ -449,7 +449,7 @@ def test_collect_special_file_paths_excludes_non_special_files():
|
|||
) == ('/foo', '/baz')
|
||||
|
||||
|
||||
DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
|
||||
DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' # noqa: FS003
|
||||
REPO_ARCHIVE_WITH_PATHS = (f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'bar')
|
||||
|
||||
|
||||
|
@ -2193,7 +2193,7 @@ def test_create_archive_with_source_directories_glob_expands():
|
|||
)
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'),
|
||||
('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'),
|
||||
output_log_level=logging.INFO,
|
||||
output_file=None,
|
||||
borg_local_path='borg',
|
||||
|
@ -2236,7 +2236,7 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through
|
|||
)
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo*'),
|
||||
('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo*'),
|
||||
output_log_level=logging.INFO,
|
||||
output_file=None,
|
||||
borg_local_path='borg',
|
||||
|
@ -2279,7 +2279,7 @@ def test_create_archive_with_glob_calls_borg_with_expanded_directories():
|
|||
)
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'),
|
||||
('borg', 'create', f'repo::{DEFAULT_ARCHIVE_NAME}', 'foo', 'food'),
|
||||
output_log_level=logging.INFO,
|
||||
output_file=None,
|
||||
borg_local_path='borg',
|
||||
|
@@ -2345,7 +2345,7 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name():
 def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
-    repository_archive_pattern = 'repo::Documents_{hostname}-{now}'
+    repository_archive_pattern = 'repo::Documents_{hostname}-{now}'  # noqa: FS003
     flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
     flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
     flexmock(module).should_receive('map_directories_to_devices').and_return({})
@@ -2380,7 +2380,7 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
             'repositories': ['repo'],
             'exclude_patterns': None,
         },
-        storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},
+        storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},  # noqa: FS003
         local_borg_version='1.2.3',
     )
@@ -2388,7 +2388,7 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
 def test_create_archive_with_repository_accepts_borg_placeholders():
     flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
     flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
-    repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}'
+    repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}'  # noqa: FS003
     flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
     flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
     flexmock(module).should_receive('map_directories_to_devices').and_return({})
@@ -2417,13 +2417,13 @@ def test_create_archive_with_repository_accepts_borg_placeholders():

     module.create_archive(
         dry_run=False,
-        repository='{fqdn}',
+        repository='{fqdn}',  # noqa: FS003
         location_config={
             'source_directories': ['foo', 'bar'],
-            'repositories': ['{fqdn}'],
+            'repositories': ['{fqdn}'],  # noqa: FS003
             'exclude_patterns': None,
         },
-        storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},
+        storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},  # noqa: FS003
         local_borg_version='1.2.3',
     )
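These exemptions all cover Borg's own placeholders: borgmatic hands strings like '{fqdn}' and 'Documents_{hostname}-{now}' to Borg verbatim, and Borg expands them when the archive is created. A hypothetical illustration of that pass-through:

# Hypothetical illustration: Borg, not Python, expands these placeholders,
# so they must survive into the final command line untouched.
repository = '{fqdn}'  # noqa: FS003
archive_name_format = 'Documents_{hostname}-{now}'  # noqa: FS003
create_command = ('borg', 'create', f'{repository}::{archive_name_format}')
assert create_command[-1] == '{fqdn}::Documents_{hostname}-{now}'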
@@ -27,27 +27,39 @@ def test_make_prune_flags_returns_flags_from_config_plus_default_prefix_glob():

     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')

-    assert tuple(result) == BASE_PRUNE_FLAGS + (('--match-archives', 'sh:{hostname}-*'),)
+    assert tuple(result) == BASE_PRUNE_FLAGS + (
+        ('--match-archives', 'sh:{hostname}-*'),  # noqa: FS003
+    )


 def test_make_prune_flags_accepts_prefix_with_placeholders():
-    retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')))
+    retention_config = OrderedDict(
+        (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))  # noqa: FS003
+    )
     flexmock(module.feature).should_receive('available').and_return(True)

     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')

-    expected = (('--keep-daily', '1'), ('--match-archives', 'sh:Documents_{hostname}-{now}*'))
+    expected = (
+        ('--keep-daily', '1'),
+        ('--match-archives', 'sh:Documents_{hostname}-{now}*'),  # noqa: FS003
+    )

     assert tuple(result) == expected


 def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives():
-    retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')))
+    retention_config = OrderedDict(
+        (('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))  # noqa: FS003
+    )
     flexmock(module.feature).should_receive('available').and_return(False)

     result = module.make_prune_flags(retention_config, local_borg_version='1.2.3')

-    expected = (('--keep-daily', '1'), ('--glob-archives', 'Documents_{hostname}-{now}*'))
+    expected = (
+        ('--keep-daily', '1'),
+        ('--glob-archives', 'Documents_{hostname}-{now}*'),  # noqa: FS003
+    )

     assert tuple(result) == expected
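Grounded in these expectations, the mapping from retention options to Borg prune flags works roughly as follows. Treat this as an illustrative sketch with made-up names (the real code lives in borgmatic.borg.prune), not the exact source:

# Illustrative sketch of the contract the tests above pin down: each keep_*
# option becomes a --keep-* flag, and the prefix becomes an archive filter
# whose spelling depends on the Borg version's feature set.
def sketch_make_prune_flags(retention_config, match_archives_supported):
    config = dict(retention_config)
    prefix = config.pop('prefix', '{hostname}-')  # noqa: FS003
    flags = [(f"--{name.replace('_', '-')}", str(value)) for name, value in config.items()]
    if prefix and match_archives_supported:
        # Newer Borg spells archive matching as --match-archives with an sh: pattern.
        flags.append(('--match-archives', f'sh:{prefix}*'))
    elif prefix:
        # Older Borg only understands --glob-archives.
        flags.append(('--glob-archives', f'{prefix}*'))
    return tuple(flags)

assert sketch_make_prune_flags({'keep_daily': 1, 'prefix': 'Documents_{hostname}-{now}'}, False) == (
    ('--keep-daily', '1'),
    ('--glob-archives', 'Documents_{hostname}-{now}*'),
)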
@@ -12,7 +12,7 @@ def test_env(monkeypatch):

 def test_env_braces(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello foo'}
@@ -20,7 +20,7 @@ def test_env_braces(monkeypatch):
 def test_env_multi(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
     monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar')
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}${MY_CUSTOM_VALUE2}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}${MY_CUSTOM_VALUE2}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello foobar'}
@@ -28,21 +28,21 @@ def test_env_multi(monkeypatch):
 def test_env_escape(monkeypatch):
     monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo')
     monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar')
-    config = {'key': r'Hello ${MY_CUSTOM_VALUE} \${MY_CUSTOM_VALUE}'}
+    config = {'key': r'Hello ${MY_CUSTOM_VALUE} \${MY_CUSTOM_VALUE}'}  # noqa: FS003
     module.resolve_env_variables(config)
-    assert config == {'key': r'Hello foo ${MY_CUSTOM_VALUE}'}
+    assert config == {'key': r'Hello foo ${MY_CUSTOM_VALUE}'}  # noqa: FS003


 def test_env_default_value(monkeypatch):
     monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False)
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE:-bar}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE:-bar}'}  # noqa: FS003
     module.resolve_env_variables(config)
     assert config == {'key': 'Hello bar'}


 def test_env_unknown(monkeypatch):
     monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False)
-    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}
+    config = {'key': 'Hello ${MY_CUSTOM_VALUE}'}  # noqa: FS003
     with pytest.raises(ValueError):
         module.resolve_env_variables(config)
@@ -55,20 +55,20 @@ def test_env_full(monkeypatch):
         'dict': {
             'key': 'value',
             'anotherdict': {
-                'key': 'My ${MY_CUSTOM_VALUE} here',
-                'other': '${MY_CUSTOM_VALUE}',
-                'escaped': r'\${MY_CUSTOM_VALUE}',
+                'key': 'My ${MY_CUSTOM_VALUE} here',  # noqa: FS003
+                'other': '${MY_CUSTOM_VALUE}',  # noqa: FS003
+                'escaped': r'\${MY_CUSTOM_VALUE}',  # noqa: FS003
                 'list': [
-                    '/home/${MY_CUSTOM_VALUE}/.local',
+                    '/home/${MY_CUSTOM_VALUE}/.local',  # noqa: FS003
                     '/var/log/',
-                    '/home/${MY_CUSTOM_VALUE2:-bar}/.config',
+                    '/home/${MY_CUSTOM_VALUE2:-bar}/.config',  # noqa: FS003
                 ],
             },
         },
         'list': [
-            '/home/${MY_CUSTOM_VALUE}/.local',
+            '/home/${MY_CUSTOM_VALUE}/.local',  # noqa: FS003
             '/var/log/',
-            '/home/${MY_CUSTOM_VALUE2-bar}/.config',
+            '/home/${MY_CUSTOM_VALUE2-bar}/.config',  # noqa: FS003
         ],
     }
     module.resolve_env_variables(config)
@@ -79,7 +79,7 @@ def test_env_full(monkeypatch):
             'anotherdict': {
                 'key': 'My foo here',
                 'other': 'foo',
-                'escaped': '${MY_CUSTOM_VALUE}',
+                'escaped': '${MY_CUSTOM_VALUE}',  # noqa: FS003
                 'list': ['/home/foo/.local', '/var/log/', '/home/bar/.config'],
             },
         },
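Taken together, these tests define the resolver's contract: ${NAME} expands from the environment, ${NAME:-default} and ${NAME-default} fall back to a default, a backslash escapes expansion, and an unset variable with no default raises ValueError. A minimal sketch of resolving one string value under those assumptions, with illustrative names (the real code lives in borgmatic.config.environment and also walks nested dicts and lists):

import os
import re

# Matches ${NAME}, ${NAME:-default}, ${NAME-default}, optionally backslash-escaped.
VARIABLE_PATTERN = re.compile(
    r'(?P<escape>\\)?\$\{(?P<name>[A-Za-z0-9_]+)((:?-)(?P<default>[^}]*))?\}'
)

def sketch_resolve_string(value):
    def replace(match):
        if match.group('escape'):
            # Escaped: drop the backslash, leave the variable unexpanded.
            return match.group()[1:]
        name, default = match.group('name'), match.group('default')
        if name not in os.environ and default is None:
            raise ValueError(f'Cannot find variable {name} in environment')
        return os.environ.get(name, default)
    return VARIABLE_PATTERN.sub(replace, value)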
@@ -13,7 +13,7 @@ def test_format_json_error_path_element_formats_property():


 def test_format_json_error_formats_error_including_path():
-    flexmock(module).format_json_error_path_element = lambda element: '.{}'.format(element)
+    flexmock(module).format_json_error_path_element = lambda element: f'.{element}'
     error = flexmock(message='oops', path=['foo', 'bar'])

     assert module.format_json_error(error) == "At 'foo.bar': oops"
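For orientation, the function under test joins per-element path fragments into one message. A sketch consistent with this assertion, illustrative rather than the exact source:

def sketch_format_json_error(error):
    path = ''.join(f'.{element}' for element in error.path).lstrip('.')
    return f"At '{path}': {error.message}"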
@@ -66,9 +66,9 @@ def test_apply_logical_validation_does_not_raise_if_archive_name_format_and_pref
     module.apply_logical_validation(
         'config.yaml',
         {
-            'storage': {'archive_name_format': '{hostname}-{now}'},
-            'retention': {'prefix': '{hostname}-'},
-            'consistency': {'prefix': '{hostname}-'},
+            'storage': {'archive_name_format': '{hostname}-{now}'},  # noqa: FS003
+            'retention': {'prefix': '{hostname}-'},  # noqa: FS003
+            'consistency': {'prefix': '{hostname}-'},  # noqa: FS003
         },
     )
@@ -11,27 +11,20 @@ def test_interpolate_context_passes_through_command_without_variable():


 def test_interpolate_context_passes_through_command_with_unknown_variable():
-    assert (
-        module.interpolate_context('test.yaml', 'pre-backup', 'ls {baz}', {'foo': 'bar'})
-        == 'ls {baz}'
-    )
+    command = 'ls {baz}'  # noqa: FS003
+
+    assert module.interpolate_context('test.yaml', 'pre-backup', command, {'foo': 'bar'}) == command


 def test_interpolate_context_interpolates_variables():
+    command = 'ls {foo}{baz} {baz}'  # noqa: FS003
     context = {'foo': 'bar', 'baz': 'quux'}

     assert (
-        module.interpolate_context('test.yaml', 'pre-backup', 'ls {foo}{baz} {baz}', context)
-        == 'ls barquux quux'
+        module.interpolate_context('test.yaml', 'pre-backup', command, context) == 'ls barquux quux'
     )


 def test_interpolate_context_does_not_touch_unknown_variables():
     context = {'foo': 'bar', 'baz': 'quux'}

     assert module.interpolate_context('test.yaml', 'pre-backup', 'ls {wtf}', context) == 'ls {wtf}'


 def test_execute_hook_invokes_each_command():
     flexmock(module).should_receive('interpolate_context').replace_with(
         lambda config_file, hook_description, command, context: command
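The restructuring above also documents the interpolation contract: names present in the context are substituted, and unknown {placeholders} pass through untouched, which rules out str.format (it would raise KeyError on {wtf}). A sketch of that contract with illustrative naming:

def sketch_interpolate_context(command, context):
    # Replace each known {name} literally; unknown placeholders survive as-is.
    for name, value in context.items():
        command = command.replace(f'{{{name}}}', value)
    return command

assert sketch_interpolate_context('ls {foo}{baz} {baz}', {'foo': 'bar', 'baz': 'quux'}) == 'ls barquux quux'
assert sketch_interpolate_context('ls {wtf}', {'foo': 'bar', 'baz': 'quux'}) == 'ls {wtf}'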
@@ -206,9 +206,7 @@ def test_ping_monitor_with_ping_uuid_hits_corresponding_url():
     payload = 'data'
     flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload)
     flexmock(module.requests).should_receive('post').with_args(
-        'https://hc-ping.com/{}'.format(hook_config['ping_url']),
-        data=payload.encode('utf-8'),
-        verify=True,
+        f"https://hc-ping.com/{hook_config['ping_url']}", data=payload.encode('utf-8'), verify=True,
     ).and_return(flexmock(ok=True))

     module.ping_monitor(
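The test name refers to configurations where ping_url holds a bare Healthchecks UUID rather than a full URL, in which case the hook builds the URL itself. A hedged sketch of that branch, assuming the startswith check used here reflects the hook's actual dispatch:

def sketch_ping_url(hook_config):
    # If ping_url is not already a URL, treat it as a bare Healthchecks UUID.
    if hook_config['ping_url'].startswith('http'):
        return hook_config['ping_url']
    return f"https://hc-ping.com/{hook_config['ping_url']}"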
@@ -17,7 +17,7 @@ def test_dump_databases_runs_mongodump_for_each_database():

     for name, process in zip(('foo', 'bar'), processes):
         flexmock(module).should_receive('execute_command').with_args(
-            ['mongodump', '--db', name, '--archive', '>', 'databases/localhost/{}'.format(name)],
+            ['mongodump', '--db', name, '--archive', '>', f'databases/localhost/{name}'],
             shell=True,
             run_to_completion=False,
         ).and_return(process).once()
@@ -134,7 +134,7 @@ def test_dump_databases_runs_pg_dump_for_each_database():
                 'custom',
                 name,
                 '>',
-                'databases/localhost/{}'.format(name),
+                f'databases/localhost/{name}',
             ),
             shell=True,
             extra_environment={'PGSSLMODE': 'disable'},
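In both database hooks, the literal '>' element only works because the command runs with shell=True: the executor joins the sequence into a single string, so the shell performs a real redirection into the dump path (typically a named pipe that Borg then reads during the backup). A rough sketch of that mechanism, with a hypothetical path and payload:

import subprocess

# With shell=True the list is joined to one string, so '>' becomes a real
# shell redirection rather than a literal argument.
name = 'foo'  # hypothetical database name
command = ['echo', 'dump-bytes', '>', f'/tmp/dump-{name}']
subprocess.run(' '.join(command), shell=True, check=True)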
@@ -7,32 +7,32 @@ from borgmatic import execute as module


 @pytest.mark.parametrize(
-    'process,exit_code,borg_local_path,expected_result',
+    'command,exit_code,borg_local_path,expected_result',
     (
-        (flexmock(args=['grep']), 2, None, True),
-        (flexmock(args=['grep']), 2, 'borg', True),
-        (flexmock(args=['borg']), 2, 'borg', True),
-        (flexmock(args=['borg1']), 2, 'borg1', True),
-        (flexmock(args=['grep']), 1, None, True),
-        (flexmock(args=['grep']), 1, 'borg', True),
-        (flexmock(args=['borg']), 1, 'borg', False),
-        (flexmock(args=['borg1']), 1, 'borg1', False),
-        (flexmock(args=['grep']), 0, None, False),
-        (flexmock(args=['grep']), 0, 'borg', False),
-        (flexmock(args=['borg']), 0, 'borg', False),
-        (flexmock(args=['borg1']), 0, 'borg1', False),
+        (['grep'], 2, None, True),
+        (['grep'], 2, 'borg', True),
+        (['borg'], 2, 'borg', True),
+        (['borg1'], 2, 'borg1', True),
+        (['grep'], 1, None, True),
+        (['grep'], 1, 'borg', True),
+        (['borg'], 1, 'borg', False),
+        (['borg1'], 1, 'borg1', False),
+        (['grep'], 0, None, False),
+        (['grep'], 0, 'borg', False),
+        (['borg'], 0, 'borg', False),
+        (['borg1'], 0, 'borg1', False),
         # -9 exit code occurs when a child process gets SIGKILLed.
-        (flexmock(args=['grep']), -9, None, True),
-        (flexmock(args=['grep']), -9, 'borg', True),
-        (flexmock(args=['borg']), -9, 'borg', True),
-        (flexmock(args=['borg1']), -9, 'borg1', True),
-        (flexmock(args=['borg']), None, None, False),
+        (['grep'], -9, None, True),
+        (['grep'], -9, 'borg', True),
+        (['borg'], -9, 'borg', True),
+        (['borg1'], -9, 'borg1', True),
+        (['borg'], None, None, False),
     ),
 )
 def test_exit_code_indicates_error_respects_exit_code_and_borg_local_path(
-    process, exit_code, borg_local_path, expected_result
+    command, exit_code, borg_local_path, expected_result
 ):
-    assert module.exit_code_indicates_error(process, exit_code, borg_local_path) is expected_result
+    assert module.exit_code_indicates_error(command, exit_code, borg_local_path) is expected_result


 def test_command_for_process_converts_sequence_command_to_string():
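The signature change above, passing a command sequence instead of a process object, lets the helper run before a process even exists. A sketch consistent with the parametrized table, with illustrative naming (the real function is borgmatic.execute.exit_code_indicates_error): Borg's exit code 1 is only a warning, so it counts as an error solely for non-Borg commands, while exit codes of 2 and up, and negative codes from signals, always do.

def sketch_exit_code_indicates_error(command, exit_code, borg_local_path=None):
    if exit_code is None:
        # No exit code yet (process still running): not an error.
        return False
    if borg_local_path and command and command[0] == borg_local_path:
        # Borg: 0 is success, 1 is only a warning; 2+ and signals are errors.
        return exit_code >= 2 or exit_code < 0
    # Any other command: any non-zero exit code is an error.
    return exit_code != 0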
@@ -239,6 +239,34 @@ def test_execute_command_and_capture_output_with_capture_stderr_returns_stderr():
     assert output == expected_output


+def test_execute_command_and_capture_output_returns_output_when_process_error_is_not_considered_an_error():
+    full_command = ['foo', 'bar']
+    expected_output = '[]'
+    err_output = b'[]'
+    flexmock(module.os, environ={'a': 'b'})
+    flexmock(module.subprocess).should_receive('check_output').with_args(
+        full_command, stderr=None, shell=False, env=None, cwd=None
+    ).and_raise(subprocess.CalledProcessError(1, full_command, err_output)).once()
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(False).once()
+
+    output = module.execute_command_and_capture_output(full_command)
+
+    assert output == expected_output
+
+
+def test_execute_command_and_capture_output_raises_when_command_errors():
+    full_command = ['foo', 'bar']
+    expected_output = '[]'
+    flexmock(module.os, environ={'a': 'b'})
+    flexmock(module.subprocess).should_receive('check_output').with_args(
+        full_command, stderr=None, shell=False, env=None, cwd=None
+    ).and_raise(subprocess.CalledProcessError(2, full_command, expected_output)).once()
+    flexmock(module).should_receive('exit_code_indicates_error').and_return(True).once()
+
+    with pytest.raises(subprocess.CalledProcessError):
+        module.execute_command_and_capture_output(full_command)
+
+
 def test_execute_command_and_capture_output_returns_output_with_shell():
     full_command = ['foo', 'bar']
     expected_output = '[]'
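These two added tests pin down the error-handling contract: a CalledProcessError whose exit code exit_code_indicates_error() deems benign (such as a Borg warning) is swallowed and its captured output returned, while anything else propagates. A minimal sketch under those assumptions, with the error predicate passed in purely for illustration:

import subprocess

def sketch_execute_and_capture(full_command, exit_code_indicates_error):
    # exit_code_indicates_error: callable(command, exit_code) -> bool.
    try:
        output = subprocess.check_output(full_command)
    except subprocess.CalledProcessError as error:
        if exit_code_indicates_error(full_command, error.returncode):
            raise
        # Benign "error" (e.g. a warning exit code): keep the captured output.
        output = error.output
    return output.decode() if isinstance(output, bytes) else output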