Refactor to support subparser-based parsed arguments.
parent 881dc9b01e
commit 75c04611dc
1 changed file with 131 additions and 97 deletions
@@ -37,7 +37,7 @@ SUBPARSER_ALIASES = {
 }
 
 
-def parse_subparser_arguments(arguments, top_level_parser, subparsers):
+def parse_subparser_arguments(unparsed_arguments, top_level_parser, subparsers):
     '''
     Given a sequence of arguments, a top-level parser (containing subparsers), and a subparsers
     object as returned by argparse.ArgumentParser().add_subparsers(), ask each subparser to parse
@@ -46,8 +46,8 @@ def parse_subparser_arguments(arguments, top_level_parser, subparsers):
     Return the result as a dict mapping from subparser name (or "global") to a parsed namespace of
     arguments.
     '''
-    parsed_arguments = collections.OrderedDict()
-    remaining_arguments = list(arguments)
+    arguments = collections.OrderedDict()
+    remaining_arguments = list(unparsed_arguments)
     alias_to_subparser_name = {
         alias: subparser_name
         for subparser_name, aliases in SUBPARSER_ALIASES.items()
@@ -56,27 +56,27 @@ def parse_subparser_arguments(arguments, top_level_parser, subparsers):
 
     # Give each subparser a shot at parsing all arguments.
     for subparser_name, subparser in subparsers.choices.items():
-        if subparser_name not in arguments:
+        if subparser_name not in unparsed_arguments:
             continue
 
         remaining_arguments.remove(subparser_name)
         canonical_name = alias_to_subparser_name.get(subparser_name, subparser_name)
 
-        parsed, remaining = subparser.parse_known_args(arguments)
-        parsed_arguments[canonical_name] = parsed
+        parsed, remaining = subparser.parse_known_args(unparsed_arguments)
+        arguments[canonical_name] = parsed
 
     # Then ask each subparser, one by one, to greedily consume arguments. Any arguments that remain
     # are global arguments.
-    for subparser_name in parsed_arguments.keys():
+    for subparser_name in arguments.keys():
         subparser = subparsers.choices[subparser_name]
         parsed, remaining_arguments = subparser.parse_known_args(remaining_arguments)
 
-    parsed_arguments['global'] = top_level_parser.parse_args(remaining_arguments)
+    arguments['global'] = top_level_parser.parse_args(remaining_arguments)
 
-    return parsed_arguments
+    return arguments
 
 
-def parse_arguments(*arguments):
+def parse_arguments(*unparsed_arguments):
     '''
     Given command-line arguments with which this script was invoked, parse the arguments and return
     them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance.
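For orientation only (not part of this commit; the flags shown are merely examples of options the subparsers define), the mapping returned by parse_subparser_arguments might be consumed like this:

    # Hypothetical call: one action ('prune') plus a global flag.
    arguments = parse_subparser_arguments(
        ['prune', '--stats', '--verbosity', '1'], top_level_parser, subparsers
    )
    # arguments is an OrderedDict along the lines of:
    #   {'prune': Namespace(stats=True, ...), 'global': Namespace(verbosity=1, ...)}
    if 'prune' in arguments:
        print(arguments['prune'].stats, arguments['global'].verbosity)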
@@ -153,7 +153,10 @@ def parse_arguments(*arguments):
     )
     init_group = init_parser.add_argument_group('init arguments')
     init_group.add_argument(
-        '-e', '--encryption', dest='encryption_mode', help='Borg repository encryption mode',
+        '-e',
+        '--encryption',
+        dest='encryption_mode',
+        help='Borg repository encryption mode',
         required=True,
     )
     init_group.add_argument(
@@ -167,9 +170,7 @@ def parse_arguments(*arguments):
         dest='storage_quota',
         help='Create a repository with a fixed storage quota',
     )
-    init_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    init_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     prune_parser = subparsers.add_parser(
         'prune',
@@ -186,9 +187,7 @@ def parse_arguments(*arguments):
         action='store_true',
         help='Display statistics of archive',
     )
-    prune_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     create_parser = subparsers.add_parser(
         'create',
@@ -215,9 +214,7 @@ def parse_arguments(*arguments):
     create_group.add_argument(
         '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
     )
-    create_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    create_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     check_parser = subparsers.add_parser(
         'check',
@@ -227,9 +224,7 @@ def parse_arguments(*arguments):
         add_help=False,
     )
     check_group = check_parser.add_argument_group('check arguments')
-    check_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     extract_parser = subparsers.add_parser(
         'extract',
@@ -243,9 +238,7 @@ def parse_arguments(*arguments):
         '--repository',
         help='Path of repository to use, defaults to the configured repository if there is only one',
     )
-    extract_group.add_argument(
-        '--archive', help='Name of archive to operate on', required=True,
-    )
+    extract_group.add_argument('--archive', help='Name of archive to operate on', required=True)
     extract_group.add_argument(
         '--restore-path',
         nargs='+',
@@ -264,7 +257,10 @@ def parse_arguments(*arguments):
     )
 
     list_parser = subparsers.add_parser(
-        'list', aliases=SUBPARSER_ALIASES['list'], help='List archives', description='List archives',
+        'list',
+        aliases=SUBPARSER_ALIASES['list'],
+        help='List archives',
+        description='List archives',
         add_help=False,
     )
     list_group = list_parser.add_argument_group('list arguments')
@@ -272,15 +268,11 @@ def parse_arguments(*arguments):
         '--repository',
         help='Path of repository to use, defaults to the configured repository if there is only one',
     )
-    list_group.add_argument(
-        '--archive', help='Name of archive to operate on'
-    )
+    list_group.add_argument('--archive', help='Name of archive to operate on')
     list_group.add_argument(
         '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
     )
-    list_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
     info_parser = subparsers.add_parser(
         'info',
@@ -293,38 +285,40 @@ def parse_arguments(*arguments):
     info_group.add_argument(
         '--json', dest='json', default=False, action='store_true', help='Output results as JSON'
     )
-    info_group.add_argument(
-        '-h', '--help', action='help', help='Show this help message and exit'
-    )
+    info_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
 
-    parsed_arguments = parse_subparser_arguments(arguments, top_level_parser, subparsers)
+    arguments = parse_subparser_arguments(unparsed_arguments, top_level_parser, subparsers)
 
-    if parsed_arguments.excludes_filename:
+    if arguments['global'].excludes_filename:
         raise ValueError(
             'The --excludes option has been replaced with exclude_patterns in configuration'
         )
 
-    if 'init' in parsed_arguments and parsed_arguments['global'].dry_run:
+    if 'init' in arguments and arguments['global'].dry_run:
         raise ValueError('The init action cannot be used with the --dry-run option')
 
-    if 'list' in parsed_arguments and 'info' in parsed_arguments and parsed_arguments['list'].json and parsed_arguments['info'].json:
-        raise ValueError(
-            'With the --json option, list and info actions cannot be used together'
-        )
+    if (
+        'list' in arguments
+        and 'info' in arguments
+        and arguments['list'].json
+        and arguments['info'].json
+    ):
+        raise ValueError('With the --json option, list and info actions cannot be used together')
 
     # If any of the action flags are explicitly requested, leave them as-is. Otherwise, assume
     # defaults: Mutate the given arguments to enable the default actions.
-    if set(parsed_arguments) == {'global'}:
-        parsed_arguments['prune'] = prune_parser.parse_known_args(arguments)
-        parsed_arguments['create'] = create_parser.parse_known_args(arguments)
-        parsed_arguments['check'] = check_parser.parse_known_args(arguments)
+    if set(arguments) == {'global'}:
+        arguments['prune'], remaining = prune_parser.parse_known_args(unparsed_arguments)
+        arguments['create'], remaining = create_parser.parse_known_args(unparsed_arguments)
+        arguments['check'], remaining = check_parser.parse_known_args(unparsed_arguments)
 
-    return parsed_arguments
+    return arguments
 
 
-def run_configuration(config_filename, config, args):  # pragma: no cover
+def run_configuration(config_filename, config, arguments):  # pragma: no cover
     '''
-    Given a config filename and the corresponding parsed config dict, execute its defined pruning,
+    Given a config filename, the corresponding parsed config dict, and command-line arguments as a
+    dict from subparser name to a namespace of parsed arguments, execute its defined pruning,
     backups, consistency checks, and/or other actions.
 
     Yield JSON output strings from executing any actions that produce JSON.
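A rough sketch of what the reworked parse_arguments now returns (illustrative invocations and flags, not part of this diff):

    # No action named: the prune, create, and check defaults are filled in.
    arguments = parse_arguments('--verbosity', '1')
    assert set(arguments) == {'global', 'prune', 'create', 'check'}

    # An explicit action: only that action (plus 'global') is present.
    arguments = parse_arguments('list', '--json')
    assert 'list' in arguments and arguments['list'].json
    assert not arguments['global'].dry_run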
@@ -333,24 +327,25 @@ def run_configuration(config_filename, config, args):  # pragma: no cover
         config.get(section_name, {})
         for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks')
     )
+    global_arguments = arguments['global']
 
     try:
         local_path = location.get('local_path', 'borg')
         remote_path = location.get('remote_path')
         borg_environment.initialize(storage)
 
-        if args.create:
+        if 'create' in arguments:
             hook.execute_hook(
                 hooks.get('before_backup'),
                 hooks.get('umask'),
                 config_filename,
                 'pre-backup',
-                args.dry_run,
+                global_arguments.dry_run,
             )
 
         for repository_path in location['repositories']:
             yield from run_actions(
-                args=args,
+                arguments=arguments,
                 location=location,
                 storage=storage,
                 retention=retention,
@@ -360,23 +355,35 @@ def run_configuration(config_filename, config, args):  # pragma: no cover
                 repository_path=repository_path,
             )
 
-        if args.create:
+        if 'create' in arguments:
             hook.execute_hook(
                 hooks.get('after_backup'),
                 hooks.get('umask'),
                 config_filename,
                 'post-backup',
-                args.dry_run,
+                global_arguments.dry_run,
             )
     except (OSError, CalledProcessError):
         hook.execute_hook(
-            hooks.get('on_error'), hooks.get('umask'), config_filename, 'on-error', args.dry_run
+            hooks.get('on_error'),
+            hooks.get('umask'),
+            config_filename,
+            'on-error',
+            global_arguments.dry_run,
         )
         raise
 
 
 def run_actions(
-    *, args, location, storage, retention, consistency, local_path, remote_path, repository_path
+    *,
+    arguments,
+    location,
+    storage,
+    retention,
+    consistency,
+    local_path,
+    remote_path,
+    repository_path
 ):  # pragma: no cover
     '''
     Given parsed command-line arguments as an argparse.ArgumentParser instance, several different
@@ -386,79 +393,94 @@ def run_actions(
     Yield JSON output strings from executing any actions that produce JSON.
     '''
     repository = os.path.expanduser(repository_path)
-    dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''
-    if args.init:
+    global_arguments = arguments['global']
+    dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else ''
+    if 'init' in arguments:
         logger.info('{}: Initializing repository'.format(repository))
         borg_init.initialize_repository(
             repository,
-            args.encryption_mode,
-            args.append_only,
-            args.storage_quota,
+            arguments['init'].encryption_mode,
+            arguments['init'].append_only,
+            arguments['init'].storage_quota,
             local_path=local_path,
             remote_path=remote_path,
         )
-    if args.prune:
+    if 'prune' in arguments:
         logger.info('{}: Pruning archives{}'.format(repository, dry_run_label))
         borg_prune.prune_archives(
-            args.dry_run,
+            global_arguments.dry_run,
             repository,
             storage,
             retention,
             local_path=local_path,
             remote_path=remote_path,
-            stats=args.stats,
+            stats=arguments['prune'].stats,
         )
-    if args.create:
+    if 'create' in arguments:
         logger.info('{}: Creating archive{}'.format(repository, dry_run_label))
         json_output = borg_create.create_archive(
-            args.dry_run,
+            global_arguments.dry_run,
             repository,
             location,
             storage,
             local_path=local_path,
             remote_path=remote_path,
-            progress=args.progress,
-            stats=args.stats,
-            json=args.json,
+            progress=arguments['create'].progress,
+            stats=arguments['create'].stats,
+            json=arguments['create'].json,
         )
         if json_output:
             yield json.loads(json_output)
-    if args.check and checks.repository_enabled_for_checks(repository, consistency):
+    if 'check' in arguments and checks.repository_enabled_for_checks(
+        repository, consistency
+    ):
         logger.info('{}: Running consistency checks'.format(repository))
         borg_check.check_archives(
             repository, storage, consistency, local_path=local_path, remote_path=remote_path
         )
-    if args.extract:
-        if args.repository is None or repository == args.repository:
-            logger.info('{}: Extracting archive {}'.format(repository, args.archive))
+    if 'extract' in arguments:
+        if (
+            arguments['extract'].repository is None
+            or repository == arguments['extract'].repository
+        ):
+            logger.info(
+                '{}: Extracting archive {}'.format(repository, arguments['extract'].archive)
+            )
             borg_extract.extract_archive(
-                args.dry_run,
+                global_arguments.dry_run,
                 repository,
-                args.archive,
-                args.restore_paths,
+                arguments['extract'].archive,
+                arguments['extract'].restore_paths,
                 location,
                 storage,
                 local_path=local_path,
                 remote_path=remote_path,
-                progress=args.progress,
+                progress=arguments['extract'].progress,
             )
-    if args.list:
-        if args.repository is None or repository == args.repository:
+    if 'list' in arguments:
+        if (
+            arguments['list'].repository is None
+            or repository == arguments['list'].repository
+        ):
             logger.info('{}: Listing archives'.format(repository))
             json_output = borg_list.list_archives(
                 repository,
                 storage,
-                args.archive,
+                arguments['list'].archive,
                 local_path=local_path,
                 remote_path=remote_path,
-                json=args.json,
+                json=arguments['list'].json,
             )
             if json_output:
                 yield json.loads(json_output)
-    if args.info:
+    if 'info' in arguments:
         logger.info('{}: Displaying summary info for archives'.format(repository))
         json_output = borg_info.display_archives_info(
-            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
+            repository,
+            storage,
+            local_path=local_path,
+            remote_path=remote_path,
+            json=arguments['info'].json,
        )
         if json_output:
             yield json.loads(json_output)
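A minimal sketch of how the reworked run_actions might be driven (hypothetical namespaces; location, storage, retention, and consistency stand in for the parsed config sections; not part of this diff):

    arguments = {
        'global': argparse.Namespace(dry_run=True),
        'prune': argparse.Namespace(stats=False),
    }
    # Only the prune branch fires, because only 'prune' (besides 'global') is present.
    for json_result in run_actions(
        arguments=arguments,
        location=location,
        storage=storage,
        retention=retention,
        consistency=consistency,
        local_path='borg',
        remote_path=None,
        repository_path='~/backups.borg',
    ):
        print(json_result)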
@@ -499,19 +521,27 @@ def load_configurations(config_filenames):
     return (configs, logs)
 
 
-def collect_configuration_run_summary_logs(configs, args):
+def collect_configuration_run_summary_logs(configs, arguments):
     '''
     Given a dict of configuration filename to corresponding parsed configuration, and parsed
-    command-line arguments as an argparse.ArgumentParser instance, run each configuration file and
-    yield a series of logging.LogRecord instances containing summary information about each run.
+    command-line arguments as a dict from subparser name to a parsed namespace of arguments, run
+    each configuration file and yield a series of logging.LogRecord instances containing summary
+    information about each run.
 
     As a side effect of running through these configuration files, output their JSON results, if
     any, to stdout.
     '''
     # Run cross-file validation checks.
-    if args.extract or (args.list and args.archive):
+    if 'extract' in arguments:
+        repository = arguments['extract'].repository
+    elif 'list' in arguments and arguments['list'].archive:
+        repository = arguments['list'].repository
+    else:
+        repository = None
+
+    if repository:
         try:
-            validate.guard_configuration_contains_repository(args.repository, configs)
+            validate.guard_configuration_contains_repository(repository, configs)
         except ValueError as error:
             yield logging.makeLogRecord(
                 dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error)
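To make the new cross-file check concrete (hypothetical command lines and paths, not from this diff):

    # borgmatic extract --repository /mnt/repo --archive foo
    #   -> repository == '/mnt/repo', so guard_configuration_contains_repository() runs against it.
    # borgmatic list --archive foo
    #   -> repository is the list action's --repository value (None here), so the guard is skipped.
    # borgmatic --verbosity 1
    #   -> neither extract nor list was requested, so repository stays None and the guard is skipped.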
@@ -522,7 +552,7 @@ def collect_configuration_run_summary_logs(configs, args):
     json_results = []
     for config_filename, config in configs.items():
         try:
-            json_results.extend(list(run_configuration(config_filename, config, args)))
+            json_results.extend(list(run_configuration(config_filename, config, arguments)))
             yield logging.makeLogRecord(
                 dict(
                     levelno=logging.INFO,
@@ -550,7 +580,9 @@ def collect_configuration_run_summary_logs(configs, args):
             dict(
                 levelno=logging.CRITICAL,
                 levelname='CRITICAL',
-                msg='{}: No configuration files found'.format(' '.join(args.config_paths)),
+                msg='{}: No configuration files found'.format(
+                    ' '.join(arguments['global'].config_paths)
+                ),
             )
         )
 
@@ -568,7 +600,7 @@ def main():  # pragma: no cover
     configure_signals()
 
     try:
-        args = parse_arguments(*sys.argv[1:])
+        arguments = parse_arguments(*sys.argv[1:])
    except ValueError as error:
         configure_logging(logging.CRITICAL)
         logger.critical(error)
@@ -580,22 +612,24 @@ def main():  # pragma: no cover
         logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv)))
         exit_with_help_link()
 
-    if args.version:
+    global_arguments = arguments['global']
+    if global_arguments.version:
         print(pkg_resources.require('borgmatic')[0].version)
         sys.exit(0)
 
-    config_filenames = tuple(collect.collect_config_filenames(args.config_paths))
+    config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths))
     configs, parse_logs = load_configurations(config_filenames)
 
-    colorama.init(autoreset=True, strip=not should_do_markup(args.no_color, configs))
+    colorama.init(autoreset=True, strip=not should_do_markup(global_arguments.no_color, configs))
     configure_logging(
-        verbosity_to_log_level(args.verbosity), verbosity_to_log_level(args.syslog_verbosity)
+        verbosity_to_log_level(global_arguments.verbosity),
+        verbosity_to_log_level(global_arguments.syslog_verbosity),
     )
 
     logger.debug('Ensuring legacy configuration is upgraded')
     convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)
 
-    summary_logs = list(collect_configuration_run_summary_logs(configs, args))
+    summary_logs = list(collect_configuration_run_summary_logs(configs, arguments))
 
     logger.info('')
     logger.info('summary:')