Use Black code formatter as part of running automated tests.

Dan Helfman 2018-09-29 22:45:00 -07:00
parent 3db17277b4
commit 76d6a69f5a
37 changed files with 484 additions and 592 deletions

NEWS

@@ -1,3 +1,6 @@
+1.2.7.dev0
+ * Use Black code formatter as part of running automated tests.
+
 1.2.6
  * Fix generated configuration to also include a "keep_daily" value so pruning works out of the
    box.


@@ -382,6 +382,10 @@ the following deviations from it:
 * Within multiline constructs, use standard four-space indentation. Don't align
   indentation with an opening delimeter.
 
+borgmatic code uses the [Black](https://github.com/ambv/black) code formatter,
+so some additional code style requirements will be enforced as well. See the
+Black documentation for more information.
+
 ### Development

@@ -433,6 +437,17 @@ cd borgmatic
 tox
 ```
 
+Note that while running borgmatic itself only requires Python 3+, running
+borgmatic's tests require Python 3.6+.
+
+If when running tests, you get an error from the
+[Black](https://github.com/ambv/black) code formatter about files that would
+be reformatted, you can ask Black to format them for you via the following:
+
+```bash
+tox -e black
+```
+
 ## Troubleshooting
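As a side note on the workflow the added documentation describes: Black can also be invoked directly to preview or apply the formatting outside of tox. The sketch below is illustrative rather than borgmatic's documented procedure; `--check` and `--diff` are standard Black flags, but the project's own line length and other options live in its tox/Black configuration, so a plain invocation may format slightly differently than `tox -e black`.

```bash
# Report which files Black would reformat, and show the changes as a diff,
# without modifying anything.
black --check --diff borgmatic

# Apply the formatting in place to the package directory; the project's
# `tox -e black` environment presumably wraps an invocation like this with
# its own settings.
black borgmatic
```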


@@ -31,7 +31,9 @@ def _parse_checks(consistency_config):
     if checks == ['disabled']:
         return ()
 
-    return tuple(check for check in checks if check.lower() not in ('disabled', '')) or DEFAULT_CHECKS
+    return (
+        tuple(check for check in checks if check.lower() not in ('disabled', '')) or DEFAULT_CHECKS
+    )
 
 
 def _make_check_flags(checks, check_last=None, prefix=None):

@@ -60,20 +62,27 @@ def _make_check_flags(checks, check_last=None, prefix=None):
         last_flags = ()
         prefix_flags = ()
         if check_last:
-            logger.warning('Ignoring check_last option, as "archives" is not in consistency checks.')
+            logger.warning(
+                'Ignoring check_last option, as "archives" is not in consistency checks.'
+            )
         if prefix:
-            logger.warning('Ignoring consistency prefix option, as "archives" is not in consistency checks.')
+            logger.warning(
+                'Ignoring consistency prefix option, as "archives" is not in consistency checks.'
+            )
 
     if set(DEFAULT_CHECKS).issubset(set(checks)):
         return last_flags + prefix_flags
 
-    return tuple(
-        '--{}-only'.format(check) for check in checks
-        if check in DEFAULT_CHECKS
-    ) + last_flags + prefix_flags
+    return (
+        tuple('--{}-only'.format(check) for check in checks if check in DEFAULT_CHECKS)
+        + last_flags
+        + prefix_flags
+    )
 
 
-def check_archives(repository, storage_config, consistency_config, local_path='borg', remote_path=None):
+def check_archives(
+    repository, storage_config, consistency_config, local_path='borg', remote_path=None
+):
     '''
     Given a local or remote repository path, a storage config dict, a consistency config dict,
     and a local/remote commands to run, check the contained Borg archives for consistency.

@@ -98,9 +107,12 @@ def check_archives(repository, storage_config, consistency_config, local_path='b
     prefix = consistency_config.get('prefix')
 
     full_command = (
-        local_path, 'check',
-        repository,
-    ) + _make_check_flags(checks, check_last, prefix) + remote_path_flags + lock_wait_flags + verbosity_flags
+        (local_path, 'check', repository)
+        + _make_check_flags(checks, check_last, prefix)
+        + remote_path_flags
+        + lock_wait_flags
+        + verbosity_flags
+    )
 
     # The check command spews to stdout/stderr even without the verbose flag. Suppress it.
     stdout = None if verbosity_flags else open(os.devnull, 'w')


@@ -42,10 +42,7 @@ def _expand_directories(directories):
         return ()
 
     return tuple(
-        itertools.chain.from_iterable(
-            _expand_directory(directory)
-            for directory in directories
-        )
+        itertools.chain.from_iterable(_expand_directory(directory) for directory in directories)
     )

@@ -75,8 +72,7 @@ def _make_pattern_flags(location_config, pattern_filename=None):
     return tuple(
         itertools.chain.from_iterable(
-            ('--patterns-from', pattern_filename)
-            for pattern_filename in pattern_filenames
+            ('--patterns-from', pattern_filename) for pattern_filename in pattern_filenames
         )
     )

@@ -91,8 +87,7 @@ def _make_exclude_flags(location_config, exclude_filename=None):
     )
     exclude_from_flags = tuple(
         itertools.chain.from_iterable(
-            ('--exclude-from', exclude_filename)
-            for exclude_filename in exclude_filenames
+            ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames
         )
     )
     caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else ()

@@ -103,7 +98,14 @@
 def create_archive(
-    dry_run, repository, location_config, storage_config, local_path='borg', remote_path=None, json=False):
+    dry_run,
+    repository,
+    location_config,
+    storage_config,
+    local_path='borg',
+    remote_path=None,
+    json=False,
+):
     '''
     Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a
     storage config dict, create a Borg archive.

@@ -123,21 +125,15 @@ def create_archive(
     full_command = (
         (
-            local_path, 'create',
+            local_path,
+            'create',
             '{repository}::{archive_name_format}'.format(
-                repository=repository,
-                archive_name_format=archive_name_format,
+                repository=repository, archive_name_format=archive_name_format
             ),
         )
         + sources
-        + _make_pattern_flags(
-            location_config,
-            pattern_file.name if pattern_file else None,
-        )
-        + _make_exclude_flags(
-            location_config,
-            exclude_file.name if exclude_file else None,
-        )
+        + _make_pattern_flags(location_config, pattern_file.name if pattern_file else None)
+        + _make_exclude_flags(location_config, exclude_file.name if exclude_file else None)
         + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ())
         + (('--compression', compression) if compression else ())
         + (('--remote-ratelimit', str(remote_rate_limit)) if remote_rate_limit else ())

@@ -148,7 +144,7 @@ def create_archive(
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())
-        + (('--list', '--filter', 'AME',) if logger.isEnabledFor(logging.INFO) else ())
+        + (('--list', '--filter', 'AME') if logger.isEnabledFor(logging.INFO) else ())
         + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
         + (('--stats',) if not dry_run and logger.isEnabledFor(logging.INFO) else ())
         + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())


@@ -19,12 +19,12 @@ def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg',
     elif logger.isEnabledFor(logging.INFO):
         verbosity_flags = ('--info',)
 
     full_list_command = (
-        local_path, 'list',
-        '--short',
-        repository,
-    ) + remote_path_flags + lock_wait_flags + verbosity_flags
+        (local_path, 'list', '--short', repository)
+        + remote_path_flags
+        + lock_wait_flags
+        + verbosity_flags
+    )
 
     list_output = subprocess.check_output(full_list_command).decode(sys.stdout.encoding)

@@ -34,13 +34,19 @@ def extract_last_archive_dry_run(repository, lock_wait=None, local_path='borg',
     list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else ()
     full_extract_command = (
-        local_path, 'extract',
-        '--dry-run',
-        '{repository}::{last_archive_name}'.format(
-            repository=repository,
-            last_archive_name=last_archive_name,
-        ),
-    ) + remote_path_flags + lock_wait_flags + verbosity_flags + list_flag
+        (
+            local_path,
+            'extract',
+            '--dry-run',
+            '{repository}::{last_archive_name}'.format(
+                repository=repository, last_archive_name=last_archive_name
+            ),
+        )
+        + remote_path_flags
+        + lock_wait_flags
+        + verbosity_flags
+        + list_flag
+    )
 
     logger.debug(' '.join(full_extract_command))
     subprocess.check_call(full_extract_command)


@@ -5,7 +5,9 @@ import subprocess
 logger = logging.getLogger(__name__)
 
 
-def display_archives_info(repository, storage_config, local_path='borg', remote_path=None, json=False):
+def display_archives_info(
+    repository, storage_config, local_path='borg', remote_path=None, json=False
+):
     '''
     Given a local or remote repository path, and a storage config dict,
     display summary information for Borg archives in the repository.


@@ -2,7 +2,6 @@ import logging
 import subprocess
 
-
 logger = logging.getLogger(__name__)
@@ -31,7 +30,9 @@ def _make_prune_flags(retention_config):
     )
 
 
-def prune_archives(dry_run, repository, storage_config, retention_config, local_path='borg', remote_path=None):
+def prune_archives(
+    dry_run, repository, storage_config, retention_config, local_path='borg', remote_path=None
+):
     '''
     Given dry-run flag, a local or remote repository path, a storage config dict, and a
     retention config dict, prune Borg archives according to the retention policy specified in that

@@ -41,14 +42,8 @@ def prune_archives(dry_run, repository, storage_config, retention_config, local_
     lock_wait = storage_config.get('lock_wait', None)
 
     full_command = (
-        (
-            local_path, 'prune',
-            repository,
-        ) + tuple(
-            element
-            for pair in _make_prune_flags(retention_config)
-            for element in pair
-        )
+        (local_path, 'prune', repository)
+        + tuple(element for pair in _make_prune_flags(retention_config) for element in pair)
         + (('--remote-path', remote_path) if remote_path else ())
         + (('--umask', str(umask)) if umask else ())
         + (('--lock-wait', str(lock_wait)) if lock_wait else ())


@@ -5,8 +5,13 @@ import os
 from subprocess import CalledProcessError
 import sys
 
-from borgmatic.borg import check as borg_check, create as borg_create, prune as borg_prune, \
-    list as borg_list, info as borg_info
+from borgmatic.borg import (
+    check as borg_check,
+    create as borg_create,
+    prune as borg_prune,
+    list as borg_list,
+    info as borg_info,
+)
 from borgmatic.commands import hook
 from borgmatic.config import collect, convert, validate
 from borgmatic.signals import configure_signals

@@ -27,19 +32,21 @@ def parse_arguments(*arguments):
     config_paths = collect.get_default_config_paths()
 
     parser = ArgumentParser(
-        description=
-        '''
+        description='''
             A simple wrapper script for the Borg backup software that creates and prunes backups.
             If none of the --prune, --create, or --check options are given, then borgmatic defaults
             to all three: prune, create, and check archives.
             '''
     )
     parser.add_argument(
-        '-c', '--config',
+        '-c',
+        '--config',
         nargs='+',
         dest='config_paths',
         default=config_paths,
-        help='Configuration filenames or directories, defaults to: {}'.format(' '.join(config_paths)),
+        help='Configuration filenames or directories, defaults to: {}'.format(
+            ' '.join(config_paths)
+        ),
     )
     parser.add_argument(
         '--excludes',

@@ -47,31 +54,26 @@ def parse_arguments(*arguments):
         help='Deprecated in favor of exclude_patterns within configuration',
     )
     parser.add_argument(
-        '-p', '--prune',
+        '-p',
+        '--prune',
         dest='prune',
         action='store_true',
         help='Prune archives according to the retention policy',
     )
     parser.add_argument(
-        '-C', '--create',
+        '-C',
+        '--create',
         dest='create',
         action='store_true',
         help='Create archives (actually perform backups)',
     )
     parser.add_argument(
-        '-k', '--check',
-        dest='check',
-        action='store_true',
-        help='Check archives for consistency',
+        '-k', '--check', dest='check', action='store_true', help='Check archives for consistency'
     )
+    parser.add_argument('-l', '--list', dest='list', action='store_true', help='List archives')
     parser.add_argument(
-        '-l', '--list',
-        dest='list',
-        action='store_true',
-        help='List archives',
-    )
-    parser.add_argument(
-        '-i', '--info',
+        '-i',
+        '--info',
         dest='info',
         action='store_true',
         help='Display summary information on archives',

@@ -84,13 +86,15 @@ def parse_arguments(*arguments):
         help='Output results from the --create, --list, or --info options as json',
     )
     parser.add_argument(
-        '-n', '--dry-run',
+        '-n',
+        '--dry-run',
         dest='dry_run',
         action='store_true',
         help='Go through the motions, but do not actually write to any repositories',
     )
     parser.add_argument(
-        '-v', '--verbosity',
+        '-v',
+        '--verbosity',
         type=int,
         choices=range(0, 3),
         default=0,

@@ -100,7 +104,9 @@ def parse_arguments(*arguments):
     args = parser.parse_args(arguments)
 
     if args.json and not (args.create or args.list or args.info):
-        raise ValueError('The --json option can only be used with the --create, --list, or --info options')
+        raise ValueError(
+            'The --json option can only be used with the --create, --list, or --info options'
+        )
 
     if args.json and args.list and args.info:
         raise ValueError(

@@ -151,7 +157,14 @@ def _run_commands(args, consistency, local_path, location, remote_path, retentio
     json_results = []
     for unexpanded_repository in location['repositories']:
         _run_commands_on_repository(
-            args, consistency, json_results, local_path, location, remote_path, retention, storage,
+            args,
+            consistency,
+            json_results,
+            local_path,
+            location,
+            remote_path,
+            retention,
+            storage,
             unexpanded_repository,
         )
     if args.json:

@@ -159,8 +172,15 @@ def _run_commands(args, consistency, local_path, location, remote_path, retentio
 
 
 def _run_commands_on_repository(
-    args, consistency, json_results, local_path, location, remote_path,
-    retention, storage, unexpanded_repository,
+    args,
+    consistency,
+    json_results,
+    local_path,
+    location,
+    remote_path,
+    retention,
+    storage,
+    unexpanded_repository,
 ):  # pragma: no cover
     repository = os.path.expanduser(unexpanded_repository)
     dry_run_label = ' (dry run; not making any changes)' if args.dry_run else ''

@@ -187,20 +207,12 @@ def _run_commands_on_repository(
     if args.check:
         logger.info('{}: Running consistency checks'.format(repository))
         borg_check.check_archives(
-            repository,
-            storage,
-            consistency,
-            local_path=local_path,
-            remote_path=remote_path
+            repository, storage, consistency, local_path=local_path, remote_path=remote_path
         )
     if args.list:
         logger.info('{}: Listing archives'.format(repository))
         output = borg_list.list_archives(
-            repository,
-            storage,
-            local_path=local_path,
-            remote_path=remote_path,
-            json=args.json,
+            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
         )
         if args.json:
             json_results.append(json.loads(output))

@@ -209,11 +221,7 @@ def _run_commands_on_repository(
     if args.info:
         logger.info('{}: Displaying summary info for archives'.format(repository))
         output = borg_info.display_archives_info(
-            repository,
-            storage,
-            local_path=local_path,
-            remote_path=remote_path,
-            json=args.json,
+            repository, storage, local_path=local_path, remote_path=remote_path, json=args.json
         )
         if args.json:
             json_results.append(json.loads(output))

@@ -232,7 +240,9 @@ def main(): # pragma: no cover
         convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames)
         if len(config_filenames) == 0:
-            raise ValueError('Error: No configuration files found in: {}'.format(' '.join(args.config_paths)))
+            raise ValueError(
+                'Error: No configuration files found in: {}'.format(' '.join(args.config_paths))
+            )
 
         for config_filename in config_filenames:
             run_configuration(config_filename, args)


@@ -26,22 +26,31 @@ def parse_arguments(*arguments):
         '''
     )
     parser.add_argument(
-        '-s', '--source-config',
+        '-s',
+        '--source-config',
         dest='source_config_filename',
         default=DEFAULT_SOURCE_CONFIG_FILENAME,
-        help='Source INI-style configuration filename. Default: {}'.format(DEFAULT_SOURCE_CONFIG_FILENAME),
+        help='Source INI-style configuration filename. Default: {}'.format(
+            DEFAULT_SOURCE_CONFIG_FILENAME
+        ),
     )
     parser.add_argument(
-        '-e', '--source-excludes',
+        '-e',
+        '--source-excludes',
         dest='source_excludes_filename',
-        default=DEFAULT_SOURCE_EXCLUDES_FILENAME if os.path.exists(DEFAULT_SOURCE_EXCLUDES_FILENAME) else None,
+        default=DEFAULT_SOURCE_EXCLUDES_FILENAME
+        if os.path.exists(DEFAULT_SOURCE_EXCLUDES_FILENAME)
+        else None,
         help='Excludes filename',
     )
     parser.add_argument(
-        '-d', '--destination-config',
+        '-d',
+        '--destination-config',
         dest='destination_config_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration filename. Default: {}'.format(DEFAULT_DESTINATION_CONFIG_FILENAME),
+        help='Destination YAML configuration filename. Default: {}'.format(
+            DEFAULT_DESTINATION_CONFIG_FILENAME
+        ),
     )
 
     return parser.parse_args(arguments)

@@ -61,7 +70,9 @@ def display_result(args): # pragma: no cover
     delete_lines = textwrap.wrap(
         'Once you are satisfied, you can safely delete {}{}.'.format(
             args.source_config_filename,
-            ' and {}'.format(args.source_excludes_filename) if args.source_excludes_filename else '',
+            ' and {}'.format(args.source_excludes_filename)
+            if args.source_excludes_filename
+            else '',
         ),
         TEXT_WRAP_CHARACTERS,
     )

@@ -75,7 +86,9 @@ def main(): # pragma: no cover
     try:
         args = parse_arguments(*sys.argv[1:])
         schema = yaml.round_trip_load(open(validate.schema_filename()).read())
-        source_config = legacy.parse_configuration(args.source_config_filename, legacy.CONFIG_FORMAT)
+        source_config = legacy.parse_configuration(
+            args.source_config_filename, legacy.CONFIG_FORMAT
+        )
         source_config_file_mode = os.stat(args.source_config_filename).st_mode
         source_excludes = (
             open(args.source_excludes_filename).read().splitlines()

@@ -83,12 +96,12 @@ def main(): # pragma: no cover
             else []
         )
 
-        destination_config = convert.convert_legacy_parsed_config(source_config, source_excludes, schema)
+        destination_config = convert.convert_legacy_parsed_config(
+            source_config, source_excludes, schema
+        )
 
         generate.write_configuration(
-            args.destination_config_filename,
-            destination_config,
-            mode=source_config_file_mode,
+            args.destination_config_filename, destination_config, mode=source_config_file_mode
         )
 
         display_result(args)


@@ -16,10 +16,13 @@ def parse_arguments(*arguments):
     '''
     parser = ArgumentParser(description='Generate a sample borgmatic YAML configuration file.')
     parser.add_argument(
-        '-d', '--destination',
+        '-d',
+        '--destination',
         dest='destination_filename',
         default=DEFAULT_DESTINATION_CONFIG_FILENAME,
-        help='Destination YAML configuration filename. Default: {}'.format(DEFAULT_DESTINATION_CONFIG_FILENAME),
+        help='Destination YAML configuration filename. Default: {}'.format(
+            DEFAULT_DESTINATION_CONFIG_FILENAME
+        ),
     )
 
     return parser.parse_args(arguments)

@@ -29,7 +32,9 @@ def main(): # pragma: no cover
     try:
         args = parse_arguments(*sys.argv[1:])
-        generate.generate_sample_configuration(args.destination_filename, validate.schema_filename())
+        generate.generate_sample_configuration(
+            args.destination_filename, validate.schema_filename()
+        )
 
         print('Generated a sample configuration file at {}.'.format(args.destination_filename))
         print()


@@ -13,7 +13,11 @@ def execute_hook(commands, config_filename, description):
     if len(commands) == 1:
         logger.info('{}: Running command for {} hook'.format(config_filename, description))
     else:
-        logger.info('{}: Running {} commands for {} hook'.format(config_filename, len(commands), description))
+        logger.info(
+            '{}: Running {} commands for {} hook'.format(
+                config_filename, len(commands), description
+            )
+        )
 
     for command in commands:
         logger.debug('{}: Hook command: {}'.format(config_filename, command))


@@ -7,8 +7,8 @@ def get_default_config_paths():
     default configuration paths. This includes both system-wide configuration and configuration in
     the current user's home directory.
     '''
-    user_config_directory = (
-        os.getenv('XDG_CONFIG_HOME') or os.path.expandvars(os.path.join('$HOME', '.config'))
+    user_config_directory = os.getenv('XDG_CONFIG_HOME') or os.path.expandvars(
+        os.path.join('$HOME', '.config')
     )
 
     return [


@@ -12,14 +12,17 @@ def _convert_section(source_section_config, section_schema):
     Where integer types exist in the given section schema, convert their values to integers.
     '''
-    destination_section_config = yaml.comments.CommentedMap([
-        (
-            option_name,
-            int(option_value)
-            if section_schema['map'].get(option_name, {}).get('type') == 'int' else option_value
-        )
-        for option_name, option_value in source_section_config.items()
-    ])
+    destination_section_config = yaml.comments.CommentedMap(
+        [
+            (
+                option_name,
+                int(option_value)
+                if section_schema['map'].get(option_name, {}).get('type') == 'int'
+                else option_value,
+            )
+            for option_name, option_value in source_section_config.items()
+        ]
+    )
 
     return destination_section_config

@@ -33,10 +36,12 @@ def convert_legacy_parsed_config(source_config, source_excludes, schema):
     Additionally, use the given schema as a source of helpful comments to include within the
     returned CommentedMap.
     '''
-    destination_config = yaml.comments.CommentedMap([
-        (section_name, _convert_section(section_config, schema['map'][section_name]))
-        for section_name, section_config in source_config._asdict().items()
-    ])
+    destination_config = yaml.comments.CommentedMap(
+        [
+            (section_name, _convert_section(section_config, schema['map'][section_name]))
+            for section_name, section_config in source_config._asdict().items()
+        ]
+    )
 
     # Split space-seperated values into actual lists, make "repository" into a list, and merge in
     # excludes.

@@ -53,9 +58,7 @@ def convert_legacy_parsed_config(source_config, source_excludes, schema):
     for section_name, section_config in destination_config.items():
         generate.add_comments_to_configuration(
-            section_config,
-            schema['map'][section_name],
-            indent=generate.INDENT,
+            section_config, schema['map'][section_name], indent=generate.INDENT
         )
 
     return destination_config

@@ -85,8 +88,7 @@ def guard_configuration_upgraded(source_config_filename, destination_config_file
     The idea is that we want to alert the user about upgrading their config if they haven't already.
     '''
     destination_config_exists = any(
-        os.path.exists(filename)
-        for filename in destination_config_filenames
+        os.path.exists(filename) for filename in destination_config_filenames
     )
 
     if os.path.exists(source_config_filename) and not destination_config_exists:


@@ -13,8 +13,7 @@ def _insert_newline_before_comment(config, field_name):
     field and its comments.
     '''
     config.ca.items[field_name][1].insert(
-        0,
-        yaml.tokens.CommentToken('\n', yaml.error.CommentMark(0), None),
+        0, yaml.tokens.CommentToken('\n', yaml.error.CommentMark(0), None)
     )

@@ -27,13 +26,12 @@ def _schema_to_sample_configuration(schema, level=0):
     if example is not None:
         return example
 
-    config = yaml.comments.CommentedMap([
-        (
-            section_name,
-            _schema_to_sample_configuration(section_schema, level + 1),
-        )
-        for section_name, section_schema in schema['map'].items()
-    ])
+    config = yaml.comments.CommentedMap(
+        [
+            (section_name, _schema_to_sample_configuration(section_schema, level + 1))
+            for section_name, section_schema in schema['map'].items()
+        ]
+    )
 
     add_comments_to_configuration(config, schema, indent=(level * INDENT))

@@ -130,11 +128,7 @@ def add_comments_to_configuration(config, schema, indent=0):
         if not field_schema or not description:
             continue
 
-        config.yaml_set_comment_before_after_key(
-            key=field_name,
-            before=description,
-            indent=indent,
-        )
+        config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent)
 
         if index > 0:
             _insert_newline_before_comment(config, field_name)

@@ -148,6 +142,5 @@ def generate_sample_configuration(config_filename, schema_filename):
     config = _schema_to_sample_configuration(schema)
 
     write_configuration(
-        config_filename,
-        _comment_out_optional_configuration(_render_configuration(config))
+        config_filename, _comment_out_optional_configuration(_render_configuration(config))
     )


@@ -45,13 +45,9 @@ CONFIG_FORMAT = (
         ),
     ),
     Section_format(
-        'consistency',
-        (
-            option('checks', required=False),
-            option('check_last', required=False),
-        ),
-    )
+        'consistency', (option('checks', required=False), option('check_last', required=False))
+    ),
 )
 
 
 def validate_configuration_format(parser, config_format):

@@ -66,7 +62,8 @@ def validate_configuration_format(parser, config_format):
     '''
     section_names = set(parser.sections())
     required_section_names = tuple(
-        section.name for section in config_format
+        section.name
+        for section in config_format
         if any(option.required for option in section.options)
     )

@@ -80,9 +77,7 @@ def validate_configuration_format(parser, config_format):
     missing_section_names = set(required_section_names) - section_names
     if missing_section_names:
-        raise ValueError(
-            'Missing config sections: {}'.format(', '.join(missing_section_names))
-        )
+        raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names)))
 
     for section_format in config_format:
         if section_format.name not in section_names:

@@ -91,26 +86,28 @@ def validate_configuration_format(parser, config_format):
         option_names = parser.options(section_format.name)
         expected_options = section_format.options
 
-        unexpected_option_names = set(option_names) - set(option.name for option in expected_options)
+        unexpected_option_names = set(option_names) - set(
+            option.name for option in expected_options
+        )
 
         if unexpected_option_names:
             raise ValueError(
                 'Unexpected options found in config section {}: {}'.format(
-                    section_format.name,
-                    ', '.join(sorted(unexpected_option_names)),
+                    section_format.name, ', '.join(sorted(unexpected_option_names))
                 )
             )
 
         missing_option_names = tuple(
-            option.name for option in expected_options if option.required
+            option.name
+            for option in expected_options
+            if option.required
             if option.name not in option_names
         )
 
         if missing_option_names:
             raise ValueError(
                 'Required options missing from config section {}: {}'.format(
-                    section_format.name,
-                    ', '.join(missing_option_names)
+                    section_format.name, ', '.join(missing_option_names)
                 )
             )

@@ -123,11 +120,7 @@ def parse_section_options(parser, section_format):
     Raise ValueError if any option values cannot be coerced to the expected Python data type.
     '''
-    type_getter = {
-        str: parser.get,
-        int: parser.getint,
-        bool: parser.getboolean,
-    }
+    type_getter = {str: parser.get, int: parser.getint, bool: parser.getboolean}
 
     return OrderedDict(
         (option.name, type_getter[option.value_type](section_format.name, option.name))

@@ -151,11 +144,10 @@ def parse_configuration(config_filename, config_format):
     # Describes a parsed configuration, where each attribute is the name of a configuration file
     # section and each value is a dict of that section's parsed options.
-    Parsed_config = namedtuple('Parsed_config', (section_format.name for section_format in config_format))
+    Parsed_config = namedtuple(
+        'Parsed_config', (section_format.name for section_format in config_format)
+    )
 
     return Parsed_config(
-        *(
-            parse_section_options(parser, section_format)
-            for section_format in config_format
-        )
+        *(parse_section_options(parser, section_format) for section_format in config_format)
     )


@@ -24,6 +24,7 @@ class Validation_error(ValueError):
     A collection of error message strings generated when attempting to validate a particular
     configurartion file.
     '''
+
     def __init__(self, config_filename, error_messages):
         self.config_filename = config_filename
         self.error_messages = error_messages

@@ -48,15 +49,16 @@ def apply_logical_validation(config_filename, parsed_configuration):
     if archive_name_format and not prefix:
         raise Validation_error(
-            config_filename, (
-                'If you provide an archive_name_format, you must also specify a retention prefix.',
-            )
+            config_filename,
+            ('If you provide an archive_name_format, you must also specify a retention prefix.',),
         )
 
     consistency_prefix = parsed_configuration.get('consistency', {}).get('prefix')
     if archive_name_format and not consistency_prefix:
-        logger.warning('Since version 1.1.16, if you provide `archive_name_format`, you should also'
-                       ' specify `consistency.prefix`.')
+        logger.warning(
+            'Since version 1.1.16, if you provide `archive_name_format`, you should also'
+            ' specify `consistency.prefix`.'
+        )
 
 
 def parse_configuration(config_filename, schema_filename):


@@ -20,9 +20,12 @@ def test_parse_arguments_with_filename_arguments_overrides_defaults():
     flexmock(os.path).should_receive('exists').and_return(True)
 
     parser = module.parse_arguments(
-        '--source-config', 'config',
-        '--source-excludes', 'excludes',
-        '--destination-config', 'config.yaml',
+        '--source-config',
+        'config',
+        '--source-excludes',
+        'excludes',
+        '--destination-config',
+        'config.yaml',
     )
 
     assert parser.source_config_filename == 'config'


@@ -6,6 +6,7 @@ def test_parse_arguments_with_no_arguments_uses_defaults():
     assert parser.destination_filename == module.DEFAULT_DESTINATION_CONFIG_FILENAME
 
+
 def test_parse_arguments_with_filename_argument_overrides_defaults():
     parser = module.parse_arguments('--destination', 'config.yaml')


@@ -11,7 +11,7 @@ from borgmatic.config import generate as module
 def test_insert_newline_before_comment_does_not_raise():
     field_name = 'foo'
     config = module.yaml.comments.CommentedMap([(field_name, 33)])
-    config.yaml_set_comment_before_after_key(key=field_name, before='Comment',)
+    config.yaml_set_comment_before_after_key(key=field_name, before='Comment')
 
     module._insert_newline_before_comment(config, field_name)

@@ -109,12 +109,7 @@ def test_write_configuration_with_already_existing_directory_does_not_raise():
 def test_add_comments_to_configuration_does_not_raise():
     # Ensure that it can deal with fields both in the schema and missing from the schema.
     config = module.yaml.comments.CommentedMap([('foo', 33), ('bar', 44), ('baz', 55)])
-    schema = {
-        'map': {
-            'foo': {'desc': 'Foo'},
-            'bar': {'desc': 'Bar'},
-        }
-    }
+    schema = {'map': {'foo': {'desc': 'Foo'}, 'bar': {'desc': 'Bar'}}}
 
     module.add_comments_to_configuration(config, schema)


@@ -11,14 +11,9 @@ def test_parse_section_options_with_punctuation_should_return_section_options():
     parser.read_file(StringIO('[section]\nfoo: {}\n'.format(string.punctuation)))
     section_format = module.Section_format(
-        'section',
-        (module.Config_option('foo', str, required=True),),
+        'section', (module.Config_option('foo', str, required=True),)
     )
 
     config = module.parse_section_options(parser, section_format)
 
-    assert config == OrderedDict(
-        (
-            ('foo', string.punctuation),
-        )
-    )
+    assert config == OrderedDict((('foo', string.punctuation),))


@@ -75,7 +75,9 @@ def test_parse_configuration_passes_through_quoted_punctuation():
             repositories:
                 - "{}.borg"
-        '''.format(escaped_punctuation)
+        '''.format(
+            escaped_punctuation
+        )
     )
 
     result = module.parse_configuration('config.yaml', 'schema.yaml')

@@ -84,7 +86,7 @@ def test_parse_configuration_passes_through_quoted_punctuation():
         'location': {
             'source_directories': ['/home'],
             'repositories': ['{}.borg'.format(string.punctuation)],
-        },
+        }
     }

@@ -111,7 +113,7 @@ def test_parse_configuration_with_schema_lacking_examples_does_not_raise():
                 required: true
                 seq:
                     - type: scalar
-        '''
+        ''',
     )
 
     module.parse_configuration('config.yaml', 'schema.yaml')


@@ -7,6 +7,7 @@ import pytest
 from borgmatic.borg import check as module
 from borgmatic.tests.unit.test_verbosity import insert_logging_mock
 
+
 def insert_subprocess_mock(check_call_command, **kwargs):
     subprocess = flexmock(module.subprocess)
     subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()

@@ -114,19 +115,16 @@ def test_check_archives_calls_borg_with_parameters(checks):
     check_last = flexmock()
     consistency_config = {'check_last': check_last}
     flexmock(module).should_receive('_parse_checks').and_return(checks)
-    flexmock(module).should_receive('_make_check_flags').with_args(checks, check_last, None).and_return(())
+    flexmock(module).should_receive('_make_check_flags').with_args(
+        checks, check_last, None
+    ).and_return(())
     stdout = flexmock()
-    insert_subprocess_mock(
-        ('borg', 'check', 'repo'),
-        stdout=stdout, stderr=STDOUT,
-    )
+    insert_subprocess_mock(('borg', 'check', 'repo'), stdout=stdout, stderr=STDOUT)
     flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
     flexmock(module.os).should_receive('devnull')
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
     )

@@ -140,9 +138,7 @@ def test_check_archives_with_extract_check_calls_extract_only():
     insert_subprocess_never()
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
     )

@@ -152,15 +148,10 @@ def test_check_archives_with_log_info_calls_borg_with_info_parameter():
     flexmock(module).should_receive('_parse_checks').and_return(checks)
     flexmock(module).should_receive('_make_check_flags').and_return(())
     insert_logging_mock(logging.INFO)
-    insert_subprocess_mock(
-        ('borg', 'check', 'repo', '--info'),
-        stdout=None, stderr=STDOUT,
-    )
+    insert_subprocess_mock(('borg', 'check', 'repo', '--info'), stdout=None, stderr=STDOUT)
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
     )

@@ -171,14 +162,11 @@ def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
     flexmock(module).should_receive('_make_check_flags').and_return(())
     insert_logging_mock(logging.DEBUG)
     insert_subprocess_mock(
-        ('borg', 'check', 'repo', '--debug', '--show-rc'),
-        stdout=None, stderr=STDOUT,
+        ('borg', 'check', 'repo', '--debug', '--show-rc'), stdout=None, stderr=STDOUT
     )
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
     )

@@ -188,9 +176,7 @@ def test_check_archives_without_any_checks_bails():
     insert_subprocess_never()
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
    )

@@ -199,12 +185,11 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
     check_last = flexmock()
     consistency_config = {'check_last': check_last}
     flexmock(module).should_receive('_parse_checks').and_return(checks)
-    flexmock(module).should_receive('_make_check_flags').with_args(checks, check_last, None).and_return(())
+    flexmock(module).should_receive('_make_check_flags').with_args(
+        checks, check_last, None
+    ).and_return(())
     stdout = flexmock()
-    insert_subprocess_mock(
-        ('borg1', 'check', 'repo'),
-        stdout=stdout, stderr=STDOUT,
-    )
+    insert_subprocess_mock(('borg1', 'check', 'repo'), stdout=stdout, stderr=STDOUT)
     flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
     flexmock(module.os).should_receive('devnull')

@@ -221,11 +206,12 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
     check_last = flexmock()
     consistency_config = {'check_last': check_last}
     flexmock(module).should_receive('_parse_checks').and_return(checks)
-    flexmock(module).should_receive('_make_check_flags').with_args(checks, check_last, None).and_return(())
+    flexmock(module).should_receive('_make_check_flags').with_args(
+        checks, check_last, None
+    ).and_return(())
     stdout = flexmock()
     insert_subprocess_mock(
-        ('borg', 'check', 'repo', '--remote-path', 'borg1'),
-        stdout=stdout, stderr=STDOUT,
+        ('borg', 'check', 'repo', '--remote-path', 'borg1'), stdout=stdout, stderr=STDOUT
     )
     flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
     flexmock(module.os).should_receive('devnull')

@@ -243,19 +229,18 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
     check_last = flexmock()
     consistency_config = {'check_last': check_last}
     flexmock(module).should_receive('_parse_checks').and_return(checks)
-    flexmock(module).should_receive('_make_check_flags').with_args(checks, check_last, None).and_return(())
+    flexmock(module).should_receive('_make_check_flags').with_args(
+        checks, check_last, None
+    ).and_return(())
     stdout = flexmock()
     insert_subprocess_mock(
-        ('borg', 'check', 'repo', '--lock-wait', '5'),
-        stdout=stdout, stderr=STDOUT,
+        ('borg', 'check', 'repo', '--lock-wait', '5'), stdout=stdout, stderr=STDOUT
     )
     flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
     flexmock(module.os).should_receive('devnull')
 
     module.check_archives(
-        repository='repo',
-        storage_config={'lock_wait': 5},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={'lock_wait': 5}, consistency_config=consistency_config
     )

@@ -265,18 +250,15 @@ def test_check_archives_with_retention_prefix():
     prefix = 'foo-'
     consistency_config = {'check_last': check_last, 'prefix': prefix}
     flexmock(module).should_receive('_parse_checks').and_return(checks)
-    flexmock(module).should_receive('_make_check_flags').with_args(checks, check_last, prefix).and_return(())
+    flexmock(module).should_receive('_make_check_flags').with_args(
+        checks, check_last, prefix
+    ).and_return(())
     stdout = flexmock()
-    insert_subprocess_mock(
-        ('borg', 'check', 'repo'),
-        stdout=stdout, stderr=STDOUT,
-    )
+    insert_subprocess_mock(('borg', 'check', 'repo'), stdout=stdout, stderr=STDOUT)
     flexmock(sys.modules['builtins']).should_receive('open').and_return(stdout)
     flexmock(module.os).should_receive('devnull')
 
     module.check_archives(
-        repository='repo',
-        storage_config={},
-        consistency_config=consistency_config,
+        repository='repo', storage_config={}, consistency_config=consistency_config
     )


@ -71,8 +71,12 @@ def test_expand_directory_with_glob_expands():
def test_expand_directories_flattens_expanded_directories(): def test_expand_directories_flattens_expanded_directories():
flexmock(module).should_receive('_expand_directory').with_args('~/foo').and_return(['/root/foo']) flexmock(module).should_receive('_expand_directory').with_args('~/foo').and_return(
flexmock(module).should_receive('_expand_directory').with_args('bar*').and_return(['bar', 'barf']) ['/root/foo']
)
flexmock(module).should_receive('_expand_directory').with_args('bar*').and_return(
['bar', 'barf']
)
paths = module._expand_directories(('~/foo', 'bar*')) paths = module._expand_directories(('~/foo', 'bar*'))
@ -86,11 +90,7 @@ def test_expand_directories_considers_none_as_no_directories():
def test_write_pattern_file_does_not_raise(): def test_write_pattern_file_does_not_raise():
temporary_file = flexmock( temporary_file = flexmock(name='filename', write=lambda mode: None, flush=lambda: None)
name='filename',
write=lambda mode: None,
flush=lambda: None,
)
flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file) flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file)
module._write_pattern_file(['exclude']) module._write_pattern_file(['exclude'])
@ -107,8 +107,7 @@ def insert_subprocess_mock(check_call_command, **kwargs):
def test_make_pattern_flags_includes_pattern_filename_when_given(): def test_make_pattern_flags_includes_pattern_filename_when_given():
pattern_flags = module._make_pattern_flags( pattern_flags = module._make_pattern_flags(
location_config={'patterns': ['R /', '- /var']}, location_config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns'
pattern_filename='/tmp/patterns',
) )
assert pattern_flags == ('--patterns-from', '/tmp/patterns') assert pattern_flags == ('--patterns-from', '/tmp/patterns')
@ -116,7 +115,7 @@ def test_make_pattern_flags_includes_pattern_filename_when_given():
def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config(): def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config():
pattern_flags = module._make_pattern_flags( pattern_flags = module._make_pattern_flags(
location_config={'patterns_from': ['patterns', 'other']}, location_config={'patterns_from': ['patterns', 'other']}
) )
assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other') assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other')
@ -124,25 +123,21 @@ def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config():
def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config(): def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config():
pattern_flags = module._make_pattern_flags( pattern_flags = module._make_pattern_flags(
location_config={'patterns_from': ['patterns']}, location_config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns'
pattern_filename='/tmp/patterns',
) )
assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns') assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns')
def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty(): def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty():
pattern_flags = module._make_pattern_flags( pattern_flags = module._make_pattern_flags(location_config={'patterns_from': None})
location_config={'patterns_from': None},
)
assert pattern_flags == () assert pattern_flags == ()
def test_make_exclude_flags_includes_exclude_patterns_filename_when_given(): def test_make_exclude_flags_includes_exclude_patterns_filename_when_given():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(
location_config={'exclude_patterns': ['*.pyc', '/var']}, location_config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes'
exclude_filename='/tmp/excludes',
) )
assert exclude_flags == ('--exclude-from', '/tmp/excludes') assert exclude_flags == ('--exclude-from', '/tmp/excludes')
@ -151,7 +146,7 @@ def test_make_exclude_flags_includes_exclude_patterns_filename_when_given():
def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config(): def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(
location_config={'exclude_from': ['excludes', 'other']}, location_config={'exclude_from': ['excludes', 'other']}
) )
assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other') assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other')
@ -159,41 +154,32 @@ def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config():
def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config(): def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(
location_config={'exclude_from': ['excludes']}, location_config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes'
exclude_filename='/tmp/excludes',
) )
assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes') assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes')
def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty(): def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(location_config={'exclude_from': None})
location_config={'exclude_from': None},
)
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_includes_exclude_caches_when_true_in_config(): def test_make_exclude_flags_includes_exclude_caches_when_true_in_config():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(location_config={'exclude_caches': True})
location_config={'exclude_caches': True},
)
assert exclude_flags == ('--exclude-caches',) assert exclude_flags == ('--exclude-caches',)
def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config(): def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(location_config={'exclude_caches': False})
location_config={'exclude_caches': False},
)
assert exclude_flags == () assert exclude_flags == ()
def test_make_exclude_flags_includes_exclude_if_present_when_in_config(): def test_make_exclude_flags_includes_exclude_if_present_when_in_config():
exclude_flags = module._make_exclude_flags( exclude_flags = module._make_exclude_flags(location_config={'exclude_if_present': 'exclude_me'})
location_config={'exclude_if_present': 'exclude_me'},
)
assert exclude_flags == ('--exclude-if-present', 'exclude_me') assert exclude_flags == ('--exclude-if-present', 'exclude_me')
@ -230,7 +216,9 @@ def test_create_archive_calls_borg_with_parameters():
def test_create_archive_with_patterns_calls_borg_with_patterns(): def test_create_archive_with_patterns_calls_borg_with_patterns():
pattern_flags = ('--patterns-from', 'patterns') pattern_flags = ('--patterns-from', 'patterns')
flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar')).and_return(()) flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar')).and_return(())
flexmock(module).should_receive('_write_pattern_file').and_return(flexmock(name='/tmp/patterns')).and_return(None) flexmock(module).should_receive('_write_pattern_file').and_return(
flexmock(name='/tmp/patterns')
).and_return(None)
flexmock(module).should_receive('_make_pattern_flags').and_return(pattern_flags) flexmock(module).should_receive('_make_pattern_flags').and_return(pattern_flags)
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(CREATE_COMMAND + pattern_flags) insert_subprocess_mock(CREATE_COMMAND + pattern_flags)
@@ -249,8 +237,12 @@ def test_create_archive_with_patterns_calls_borg_with_patterns():
def test_create_archive_with_exclude_patterns_calls_borg_with_excludes(): def test_create_archive_with_exclude_patterns_calls_borg_with_excludes():
exclude_flags = ('--exclude-from', 'excludes') exclude_flags = ('--exclude-from', 'excludes')
flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar')).and_return(('exclude',)) flexmock(module).should_receive('_expand_directories').and_return(('foo', 'bar')).and_return(
flexmock(module).should_receive('_write_pattern_file').and_return(None).and_return(flexmock(name='/tmp/excludes')) ('exclude',)
)
flexmock(module).should_receive('_write_pattern_file').and_return(None).and_return(
flexmock(name='/tmp/excludes')
)
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(exclude_flags) flexmock(module).should_receive('_make_exclude_flags').and_return(exclude_flags)
insert_subprocess_mock(CREATE_COMMAND + exclude_flags) insert_subprocess_mock(CREATE_COMMAND + exclude_flags)
@@ -273,7 +265,7 @@ def test_create_archive_with_log_info_calls_borg_with_info_parameter():
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(CREATE_COMMAND + ('--list', '--filter', 'AME', '--info', '--stats',)) insert_subprocess_mock(CREATE_COMMAND + ('--list', '--filter', 'AME', '--info', '--stats'))
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.create_archive( module.create_archive(
@@ -293,7 +285,9 @@ def test_create_archive_with_log_debug_calls_borg_with_debug_parameter():
flexmock(module).should_receive('_write_pattern_file').and_return(None) flexmock(module).should_receive('_write_pattern_file').and_return(None)
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(CREATE_COMMAND + ('--list', '--filter', 'AME','--stats', '--debug', '--show-rc')) insert_subprocess_mock(
CREATE_COMMAND + ('--list', '--filter', 'AME', '--stats', '--debug', '--show-rc')
)
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.create_archive( module.create_archive(
@@ -359,7 +353,9 @@ def test_create_archive_with_dry_run_and_log_debug_calls_borg_without_stats_para
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(CREATE_COMMAND + ('--list', '--filter', 'AME', '--debug', '--show-rc', '--dry-run')) insert_subprocess_mock(
CREATE_COMMAND + ('--list', '--filter', 'AME', '--debug', '--show-rc', '--dry-run')
)
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.create_archive( module.create_archive(
@@ -625,16 +621,20 @@ def test_create_archive_with_json_calls_borg_with_json_parameter():
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={}, storage_config={},
json=True json=True,
) )
def test_create_archive_with_source_directories_glob_expands(): def test_create_archive_with_source_directories_glob_expands():
flexmock(module).should_receive('_expand_directories').and_return(('foo', 'food')).and_return(()) flexmock(module).should_receive('_expand_directories').and_return(('foo', 'food')).and_return(
()
)
flexmock(module).should_receive('_write_pattern_file').and_return(None) flexmock(module).should_receive('_write_pattern_file').and_return(None)
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food')) insert_subprocess_mock(
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food')
)
flexmock(module.glob).should_receive('glob').with_args('foo*').and_return(['foo', 'food']) flexmock(module.glob).should_receive('glob').with_args('foo*').and_return(['foo', 'food'])
module.create_archive( module.create_archive(
@@ -670,11 +670,15 @@ def test_create_archive_with_non_matching_source_directories_glob_passes_through
def test_create_archive_with_glob_calls_borg_with_expanded_directories(): def test_create_archive_with_glob_calls_borg_with_expanded_directories():
flexmock(module).should_receive('_expand_directories').and_return(('foo', 'food')).and_return(()) flexmock(module).should_receive('_expand_directories').and_return(('foo', 'food')).and_return(
()
)
flexmock(module).should_receive('_write_pattern_file').and_return(None) flexmock(module).should_receive('_write_pattern_file').and_return(None)
flexmock(module).should_receive('_make_pattern_flags').and_return(()) flexmock(module).should_receive('_make_pattern_flags').and_return(())
flexmock(module).should_receive('_make_exclude_flags').and_return(()) flexmock(module).should_receive('_make_exclude_flags').and_return(())
insert_subprocess_mock(('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food')) insert_subprocess_mock(
('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food')
)
module.create_archive( module.create_archive(
dry_run=False, dry_run=False,
@@ -703,9 +707,7 @@ def test_create_archive_with_archive_name_format_calls_borg_with_archive_name():
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={ storage_config={'archive_name_format': 'ARCHIVE_NAME'},
'archive_name_format': 'ARCHIVE_NAME',
},
) )
@@ -724,7 +726,5 @@ def test_create_archive_with_archive_name_format_accepts_borg_placeholders():
'repositories': ['repo'], 'repositories': ['repo'],
'exclude_patterns': None, 'exclude_patterns': None,
}, },
storage_config={ storage_config={'archive_name_format': 'Documents_{hostname}-{now}'},
'archive_name_format': 'Documents_{hostname}-{now}',
},
) )


@@ -19,54 +19,40 @@ def insert_subprocess_never():
def insert_subprocess_check_output_mock(check_output_command, result, **kwargs): def insert_subprocess_check_output_mock(check_output_command, result, **kwargs):
subprocess = flexmock(module.subprocess) subprocess = flexmock(module.subprocess)
subprocess.should_receive('check_output').with_args(check_output_command, **kwargs).and_return(result).once() subprocess.should_receive('check_output').with_args(check_output_command, **kwargs).and_return(
result
).once()
def test_extract_last_archive_dry_run_should_call_borg_with_last_archive(): def test_extract_last_archive_dry_run_should_call_borg_with_last_archive():
flexmock(sys.stdout).encoding = 'utf-8' flexmock(sys.stdout).encoding = 'utf-8'
insert_subprocess_check_output_mock( insert_subprocess_check_output_mock(
('borg', 'list', '--short', 'repo'), ('borg', 'list', '--short', 'repo'), result='archive1\narchive2\n'.encode('utf-8')
result='archive1\narchive2\n'.encode('utf-8'),
)
insert_subprocess_mock(
('borg', 'extract', '--dry-run', 'repo::archive2'),
) )
insert_subprocess_mock(('borg', 'extract', '--dry-run', 'repo::archive2'))
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
repository='repo',
lock_wait=None,
)
def test_extract_last_archive_dry_run_without_any_archives_should_bail(): def test_extract_last_archive_dry_run_without_any_archives_should_bail():
flexmock(sys.stdout).encoding = 'utf-8' flexmock(sys.stdout).encoding = 'utf-8'
insert_subprocess_check_output_mock( insert_subprocess_check_output_mock(
('borg', 'list', '--short', 'repo'), ('borg', 'list', '--short', 'repo'), result='\n'.encode('utf-8')
result='\n'.encode('utf-8'),
) )
insert_subprocess_never() insert_subprocess_never()
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
repository='repo',
lock_wait=None,
)
def test_extract_last_archive_dry_run_with_log_info_should_call_borg_with_info_parameter(): def test_extract_last_archive_dry_run_with_log_info_should_call_borg_with_info_parameter():
flexmock(sys.stdout).encoding = 'utf-8' flexmock(sys.stdout).encoding = 'utf-8'
insert_subprocess_check_output_mock( insert_subprocess_check_output_mock(
('borg', 'list', '--short', 'repo', '--info'), ('borg', 'list', '--short', 'repo', '--info'), result='archive1\narchive2\n'.encode('utf-8')
result='archive1\narchive2\n'.encode('utf-8'),
)
insert_subprocess_mock(
('borg', 'extract', '--dry-run', 'repo::archive2', '--info'),
) )
insert_subprocess_mock(('borg', 'extract', '--dry-run', 'repo::archive2', '--info'))
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
repository='repo',
lock_wait=None,
)
def test_extract_last_archive_dry_run_with_log_debug_should_call_borg_with_debug_parameter(): def test_extract_last_archive_dry_run_with_log_debug_should_call_borg_with_debug_parameter():
@@ -76,31 +62,21 @@ def test_extract_last_archive_dry_run_with_log_debug_should_call_borg_with_debug
result='archive1\narchive2\n'.encode('utf-8'), result='archive1\narchive2\n'.encode('utf-8'),
) )
insert_subprocess_mock( insert_subprocess_mock(
('borg', 'extract', '--dry-run', 'repo::archive2', '--debug', '--show-rc', '--list'), ('borg', 'extract', '--dry-run', 'repo::archive2', '--debug', '--show-rc', '--list')
) )
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None)
repository='repo',
lock_wait=None,
)
def test_extract_last_archive_dry_run_should_call_borg_via_local_path(): def test_extract_last_archive_dry_run_should_call_borg_via_local_path():
flexmock(sys.stdout).encoding = 'utf-8' flexmock(sys.stdout).encoding = 'utf-8'
insert_subprocess_check_output_mock( insert_subprocess_check_output_mock(
('borg1', 'list', '--short', 'repo'), ('borg1', 'list', '--short', 'repo'), result='archive1\narchive2\n'.encode('utf-8')
result='archive1\narchive2\n'.encode('utf-8'),
)
insert_subprocess_mock(
('borg1', 'extract', '--dry-run', 'repo::archive2'),
) )
insert_subprocess_mock(('borg1', 'extract', '--dry-run', 'repo::archive2'))
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None, local_path='borg1')
repository='repo',
lock_wait=None,
local_path='borg1',
)
def test_extract_last_archive_dry_run_should_call_borg_with_remote_path_parameters(): def test_extract_last_archive_dry_run_should_call_borg_with_remote_path_parameters():
@@ -110,14 +86,10 @@ def test_extract_last_archive_dry_run_should_call_borg_with_remote_path_paramete
result='archive1\narchive2\n'.encode('utf-8'), result='archive1\narchive2\n'.encode('utf-8'),
) )
insert_subprocess_mock( insert_subprocess_mock(
('borg', 'extract', '--dry-run', 'repo::archive2', '--remote-path', 'borg1'), ('borg', 'extract', '--dry-run', 'repo::archive2', '--remote-path', 'borg1')
) )
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=None, remote_path='borg1')
repository='repo',
lock_wait=None,
remote_path='borg1',
)
def test_extract_last_archive_dry_run_should_call_borg_with_lock_wait_parameters(): def test_extract_last_archive_dry_run_should_call_borg_with_lock_wait_parameters():
@@ -126,11 +98,6 @@ def test_extract_last_archive_dry_run_should_call_borg_with_lock_wait_parameters
('borg', 'list', '--short', 'repo', '--lock-wait', '5'), ('borg', 'list', '--short', 'repo', '--lock-wait', '5'),
result='archive1\narchive2\n'.encode('utf-8'), result='archive1\narchive2\n'.encode('utf-8'),
) )
insert_subprocess_mock( insert_subprocess_mock(('borg', 'extract', '--dry-run', 'repo::archive2', '--lock-wait', '5'))
('borg', 'extract', '--dry-run', 'repo::archive2', '--lock-wait', '5'),
)
module.extract_last_archive_dry_run( module.extract_last_archive_dry_run(repository='repo', lock_wait=5)
repository='repo',
lock_wait=5,
)


@@ -18,66 +18,42 @@ INFO_COMMAND = ('borg', 'info', 'repo')
def test_display_archives_info_calls_borg_with_parameters(): def test_display_archives_info_calls_borg_with_parameters():
insert_subprocess_mock(INFO_COMMAND) insert_subprocess_mock(INFO_COMMAND)
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_display_archives_info_with_log_info_calls_borg_with_info_parameter(): def test_display_archives_info_with_log_info_calls_borg_with_info_parameter():
insert_subprocess_mock(INFO_COMMAND + ('--info',)) insert_subprocess_mock(INFO_COMMAND + ('--info',))
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter(): def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter():
insert_subprocess_mock(INFO_COMMAND + ('--debug', '--show-rc')) insert_subprocess_mock(INFO_COMMAND + ('--debug', '--show-rc'))
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_display_archives_info_with_json_calls_borg_with_json_parameter(): def test_display_archives_info_with_json_calls_borg_with_json_parameter():
insert_subprocess_mock(INFO_COMMAND + ('--json',)) insert_subprocess_mock(INFO_COMMAND + ('--json',))
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={}, json=True)
repository='repo',
storage_config={},
json=True,
)
def test_display_archives_info_with_local_path_calls_borg_via_local_path(): def test_display_archives_info_with_local_path_calls_borg_via_local_path():
insert_subprocess_mock(('borg1',) + INFO_COMMAND[1:]) insert_subprocess_mock(('borg1',) + INFO_COMMAND[1:])
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={}, local_path='borg1')
repository='repo',
storage_config={},
local_path='borg1',
)
def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_parameters(): def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_parameters():
insert_subprocess_mock(INFO_COMMAND + ('--remote-path', 'borg1')) insert_subprocess_mock(INFO_COMMAND + ('--remote-path', 'borg1'))
module.display_archives_info( module.display_archives_info(repository='repo', storage_config={}, remote_path='borg1')
repository='repo',
storage_config={},
remote_path='borg1',
)
def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_parameters(): def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5} storage_config = {'lock_wait': 5}
insert_subprocess_mock(INFO_COMMAND + ('--lock-wait', '5')) insert_subprocess_mock(INFO_COMMAND + ('--lock-wait', '5'))
module.display_archives_info( module.display_archives_info(repository='repo', storage_config=storage_config)
repository='repo',
storage_config=storage_config,
)


@@ -18,67 +18,43 @@ LIST_COMMAND = ('borg', 'list', 'repo')
def test_list_archives_calls_borg_with_parameters(): def test_list_archives_calls_borg_with_parameters():
insert_subprocess_mock(LIST_COMMAND) insert_subprocess_mock(LIST_COMMAND)
module.list_archives( module.list_archives(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_list_archives_with_log_info_calls_borg_with_info_parameter(): def test_list_archives_with_log_info_calls_borg_with_info_parameter():
insert_subprocess_mock(LIST_COMMAND + ('--info',)) insert_subprocess_mock(LIST_COMMAND + ('--info',))
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.list_archives( module.list_archives(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_list_archives_with_log_debug_calls_borg_with_debug_parameter(): def test_list_archives_with_log_debug_calls_borg_with_debug_parameter():
insert_subprocess_mock(LIST_COMMAND + ('--debug', '--show-rc')) insert_subprocess_mock(LIST_COMMAND + ('--debug', '--show-rc'))
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.list_archives( module.list_archives(repository='repo', storage_config={})
repository='repo',
storage_config={},
)
def test_list_archives_with_json_calls_borg_with_json_parameter(): def test_list_archives_with_json_calls_borg_with_json_parameter():
insert_subprocess_mock(LIST_COMMAND + ('--json',)) insert_subprocess_mock(LIST_COMMAND + ('--json',))
module.list_archives( module.list_archives(repository='repo', storage_config={}, json=True)
repository='repo',
storage_config={},
json=True,
)
def test_list_archives_with_local_path_calls_borg_via_local_path(): def test_list_archives_with_local_path_calls_borg_via_local_path():
insert_subprocess_mock(('borg1',) + LIST_COMMAND[1:]) insert_subprocess_mock(('borg1',) + LIST_COMMAND[1:])
module.list_archives( module.list_archives(repository='repo', storage_config={}, local_path='borg1')
repository='repo',
storage_config={},
local_path='borg1',
)
def test_list_archives_with_remote_path_calls_borg_with_remote_path_parameters(): def test_list_archives_with_remote_path_calls_borg_with_remote_path_parameters():
insert_subprocess_mock(LIST_COMMAND + ('--remote-path', 'borg1')) insert_subprocess_mock(LIST_COMMAND + ('--remote-path', 'borg1'))
module.list_archives( module.list_archives(repository='repo', storage_config={}, remote_path='borg1')
repository='repo',
storage_config={},
remote_path='borg1',
)
def test_list_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): def test_list_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5} storage_config = {'lock_wait': 5}
insert_subprocess_mock(LIST_COMMAND + ('--lock-wait', '5')) insert_subprocess_mock(LIST_COMMAND + ('--lock-wait', '5'))
module.list_archives( module.list_archives(repository='repo', storage_config=storage_config)
repository='repo',
storage_config=storage_config,
)


@@ -12,22 +12,11 @@ def insert_subprocess_mock(check_call_command, **kwargs):
subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once() subprocess.should_receive('check_call').with_args(check_call_command, **kwargs).once()
BASE_PRUNE_FLAGS = (('--keep-daily', '1'), ('--keep-weekly', '2'), ('--keep-monthly', '3'))
BASE_PRUNE_FLAGS = (
('--keep-daily', '1'),
('--keep-weekly', '2'),
('--keep-monthly', '3'),
)
def test_make_prune_flags_returns_flags_from_config_plus_default_prefix(): def test_make_prune_flags_returns_flags_from_config_plus_default_prefix():
retention_config = OrderedDict( retention_config = OrderedDict((('keep_daily', 1), ('keep_weekly', 2), ('keep_monthly', 3)))
(
('keep_daily', 1),
('keep_weekly', 2),
('keep_monthly', 3),
)
)
result = module._make_prune_flags(retention_config) result = module._make_prune_flags(retention_config)
@@ -35,94 +24,82 @@ def test_make_prune_flags_returns_flags_from_config_plus_default_prefix():
def test_make_prune_flags_accepts_prefix_with_placeholders(): def test_make_prune_flags_accepts_prefix_with_placeholders():
retention_config = OrderedDict( retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}')))
(
('keep_daily', 1),
('prefix', 'Documents_{hostname}-{now}'),
)
)
result = module._make_prune_flags(retention_config) result = module._make_prune_flags(retention_config)
expected = ( expected = (('--keep-daily', '1'), ('--prefix', 'Documents_{hostname}-{now}'))
('--keep-daily', '1'),
('--prefix', 'Documents_{hostname}-{now}'),
)
assert tuple(result) == expected assert tuple(result) == expected
PRUNE_COMMAND = ( PRUNE_COMMAND = (
'borg', 'prune', 'repo', '--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly', '3', 'borg',
'prune',
'repo',
'--keep-daily',
'1',
'--keep-weekly',
'2',
'--keep-monthly',
'3',
) )
def test_prune_archives_calls_borg_with_parameters(): def test_prune_archives_calls_borg_with_parameters():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND) insert_subprocess_mock(PRUNE_COMMAND)
module.prune_archives( module.prune_archives(
dry_run=False, dry_run=False, repository='repo', storage_config={}, retention_config=retention_config
repository='repo',
storage_config={},
retention_config=retention_config,
) )
def test_prune_archives_with_log_info_calls_borg_with_info_parameter(): def test_prune_archives_with_log_info_calls_borg_with_info_parameter():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--stats', '--info',)) insert_subprocess_mock(PRUNE_COMMAND + ('--stats', '--info'))
insert_logging_mock(logging.INFO) insert_logging_mock(logging.INFO)
module.prune_archives( module.prune_archives(
repository='repo', repository='repo', storage_config={}, dry_run=False, retention_config=retention_config
storage_config={},
dry_run=False,
retention_config=retention_config,
) )
def test_prune_archives_with_log_debug_calls_borg_with_debug_parameter(): def test_prune_archives_with_log_debug_calls_borg_with_debug_parameter():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--stats', '--debug', '--list', '--show-rc')) insert_subprocess_mock(PRUNE_COMMAND + ('--stats', '--debug', '--list', '--show-rc'))
insert_logging_mock(logging.DEBUG) insert_logging_mock(logging.DEBUG)
module.prune_archives( module.prune_archives(
repository='repo', repository='repo', storage_config={}, dry_run=False, retention_config=retention_config
storage_config={},
dry_run=False,
retention_config=retention_config,
) )
def test_prune_archives_with_dry_run_calls_borg_with_dry_run_parameter(): def test_prune_archives_with_dry_run_calls_borg_with_dry_run_parameter():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--dry-run',)) insert_subprocess_mock(PRUNE_COMMAND + ('--dry-run',))
module.prune_archives( module.prune_archives(
repository='repo', repository='repo', storage_config={}, dry_run=True, retention_config=retention_config
storage_config={},
dry_run=True,
retention_config=retention_config,
) )
def test_prune_archives_with_local_path_calls_borg_via_local_path(): def test_prune_archives_with_local_path_calls_borg_via_local_path():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(('borg1',) + PRUNE_COMMAND[1:]) insert_subprocess_mock(('borg1',) + PRUNE_COMMAND[1:])
@@ -138,7 +115,7 @@ def test_prune_archives_with_local_path_calls_borg_via_local_path():
def test_prune_archives_with_remote_path_calls_borg_with_remote_path_parameters(): def test_prune_archives_with_remote_path_calls_borg_with_remote_path_parameters():
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--remote-path', 'borg1')) insert_subprocess_mock(PRUNE_COMMAND + ('--remote-path', 'borg1'))
@@ -155,7 +132,7 @@ def test_prune_archives_with_umask_calls_borg_with_umask_parameters():
storage_config = {'umask': '077'} storage_config = {'umask': '077'}
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--umask', '077')) insert_subprocess_mock(PRUNE_COMMAND + ('--umask', '077'))
@@ -171,7 +148,7 @@ def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
storage_config = {'lock_wait': 5} storage_config = {'lock_wait': 5}
retention_config = flexmock() retention_config = flexmock()
flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return( flexmock(module).should_receive('_make_prune_flags').with_args(retention_config).and_return(
BASE_PRUNE_FLAGS, BASE_PRUNE_FLAGS
) )
insert_subprocess_mock(PRUNE_COMMAND + ('--lock-wait', '5')) insert_subprocess_mock(PRUNE_COMMAND + ('--lock-wait', '5'))


@@ -13,9 +13,9 @@ def test__run_commands_handles_multiple_json_outputs_in_array():
.should_receive('_run_commands_on_repository') .should_receive('_run_commands_on_repository')
.times(3) .times(3)
.replace_with( .replace_with(
lambda args, consistency, json_results, local_path, location, remote_path, retention, lambda args, consistency, json_results, local_path, location, remote_path, retention, storage, unexpanded_repository: json_results.append(
storage, {"whatever": unexpanded_repository}
unexpanded_repository: json_results.append({"whatever": unexpanded_repository}) )
) )
) )
@@ -31,7 +31,7 @@ def test__run_commands_handles_multiple_json_outputs_in_array():
{"whatever": "fake_repo2"}, {"whatever": "fake_repo2"},
{"whatever": "fake_repo3"} {"whatever": "fake_repo3"}
] ]
''', '''
) )
) )
) )
@@ -41,11 +41,7 @@ def test__run_commands_handles_multiple_json_outputs_in_array():
args=flexmock(json=True), args=flexmock(json=True),
consistency=None, consistency=None,
local_path=None, local_path=None,
location={'repositories': [ location={'repositories': ['fake_repo1', 'fake_repo2', 'fake_repo3']},
'fake_repo1',
'fake_repo2',
'fake_repo3'
]},
remote_path=None, remote_path=None,
retention=None, retention=None,
storage=None, storage=None,


@@ -33,25 +33,31 @@ def test_convert_legacy_parsed_config_transforms_source_config_to_mapping():
destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema)
assert destination_config == OrderedDict([ assert destination_config == OrderedDict(
[
( (
'location', 'location',
OrderedDict([ OrderedDict(
[
('source_directories', ['/home']), ('source_directories', ['/home']),
('repositories', ['hostname.borg']), ('repositories', ['hostname.borg']),
('exclude_patterns', ['/var']), ('exclude_patterns', ['/var']),
]), ]
),
), ),
('storage', OrderedDict([('encryption_passphrase', 'supersecret')])), ('storage', OrderedDict([('encryption_passphrase', 'supersecret')])),
('retention', OrderedDict([('keep_daily', 7)])), ('retention', OrderedDict([('keep_daily', 7)])),
('consistency', OrderedDict([('checks', ['repository'])])), ('consistency', OrderedDict([('checks', ['repository'])])),
]) ]
)
def test_convert_legacy_parsed_config_splits_space_separated_values(): def test_convert_legacy_parsed_config_splits_space_separated_values():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
source_config = Parsed_config( source_config = Parsed_config(
location=OrderedDict([('source_directories', '/home /etc'), ('repository', 'hostname.borg')]), location=OrderedDict(
[('source_directories', '/home /etc'), ('repository', 'hostname.borg')]
),
storage=OrderedDict(), storage=OrderedDict(),
retention=OrderedDict(), retention=OrderedDict(),
consistency=OrderedDict([('checks', 'repository archives')]), consistency=OrderedDict([('checks', 'repository archives')]),
@@ -61,19 +67,23 @@ def test_convert_legacy_parsed_config_splits_space_separated_values():
destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema)
assert destination_config == OrderedDict([ assert destination_config == OrderedDict(
[
( (
'location', 'location',
OrderedDict([ OrderedDict(
[
('source_directories', ['/home', '/etc']), ('source_directories', ['/home', '/etc']),
('repositories', ['hostname.borg']), ('repositories', ['hostname.borg']),
('exclude_patterns', ['/var']), ('exclude_patterns', ['/var']),
]), ]
),
), ),
('storage', OrderedDict()), ('storage', OrderedDict()),
('retention', OrderedDict()), ('retention', OrderedDict()),
('consistency', OrderedDict([('checks', ['repository', 'archives'])])), ('consistency', OrderedDict([('checks', ['repository', 'archives'])])),
]) ]
)
def test_guard_configuration_upgraded_raises_when_only_source_config_present(): def test_guard_configuration_upgraded_raises_when_only_source_config_present():


@@ -9,41 +9,29 @@ def test_schema_to_sample_configuration_generates_config_with_examples():
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict)
flexmock(module).should_receive('add_comments_to_configuration') flexmock(module).should_receive('add_comments_to_configuration')
schema = { schema = {
'map': OrderedDict([ 'map': OrderedDict(
[
('section1', {'map': {'field1': OrderedDict([('example', 'Example 1')])}}),
( (
'section1', { 'section2',
'map': { {
'field1': OrderedDict([ 'map': OrderedDict(
('example', 'Example 1') [
]),
},
},
),
(
'section2', {
'map': OrderedDict([
('field2', {'example': 'Example 2'}), ('field2', {'example': 'Example 2'}),
('field3', {'example': 'Example 3'}), ('field3', {'example': 'Example 3'}),
]), ]
} )
},
), ),
]) ]
)
} }
config = module._schema_to_sample_configuration(schema) config = module._schema_to_sample_configuration(schema)
assert config == OrderedDict([ assert config == OrderedDict(
( [
'section1', ('section1', OrderedDict([('field1', 'Example 1')])),
OrderedDict([ ('section2', OrderedDict([('field2', 'Example 2'), ('field3', 'Example 3')])),
('field1', 'Example 1'), ]
]),
),
(
'section2',
OrderedDict([
('field2', 'Example 2'),
('field3', 'Example 3'),
]),
) )
])


@@ -25,17 +25,9 @@ def test_validate_configuration_format_with_valid_config_should_not_raise():
parser.should_receive('options').with_args('other').and_return(('such',)) parser.should_receive('options').with_args('other').and_return(('such',))
config_format = ( config_format = (
module.Section_format( module.Section_format(
'section', 'section', options=(module.Config_option('stuff', str, required=True),)
options=(
module.Config_option('stuff', str, required=True),
),
),
module.Section_format(
'other',
options=(
module.Config_option('such', str, required=True),
),
), ),
module.Section_format('other', options=(module.Config_option('such', str, required=True),)),
) )
module.validate_configuration_format(parser, config_format) module.validate_configuration_format(parser, config_format)
@@ -46,10 +38,7 @@ def test_validate_configuration_format_with_missing_required_section_should_rais
parser.should_receive('sections').and_return(('section',)) parser.should_receive('sections').and_return(('section',))
config_format = ( config_format = (
module.Section_format( module.Section_format(
'section', 'section', options=(module.Config_option('stuff', str, required=True),)
options=(
module.Config_option('stuff', str, required=True),
),
), ),
# At least one option in this section is required, so the section is required. # At least one option in this section is required, so the section is required.
module.Section_format( module.Section_format(
@@ -71,10 +60,7 @@ def test_validate_configuration_format_with_missing_optional_section_should_not_
parser.should_receive('options').with_args('section').and_return(('stuff',)) parser.should_receive('options').with_args('section').and_return(('stuff',))
config_format = ( config_format = (
module.Section_format( module.Section_format(
'section', 'section', options=(module.Config_option('stuff', str, required=True),)
options=(
module.Config_option('stuff', str, required=True),
),
), ),
# No options in the section are required, so the section is optional. # No options in the section are required, so the section is optional.
module.Section_format( module.Section_format(
@@ -92,9 +78,7 @@ def test_validate_configuration_format_with_missing_optional_section_should_not_
def test_validate_configuration_format_with_unknown_section_should_raise(): def test_validate_configuration_format_with_unknown_section_should_raise():
parser = flexmock() parser = flexmock()
parser.should_receive('sections').and_return(('section', 'extra')) parser.should_receive('sections').and_return(('section', 'extra'))
config_format = ( config_format = (module.Section_format('section', options=()),)
module.Section_format('section', options=()),
)
with pytest.raises(ValueError): with pytest.raises(ValueError):
module.validate_configuration_format(parser, config_format) module.validate_configuration_format(parser, config_format)
@@ -141,8 +125,7 @@ def test_validate_configuration_format_with_extra_option_should_raise():
parser.should_receive('options').with_args('section').and_return(('option', 'extra')) parser.should_receive('options').with_args('section').and_return(('option', 'extra'))
config_format = ( config_format = (
module.Section_format( module.Section_format(
'section', 'section', options=(module.Config_option('option', str, required=True),)
options=(module.Config_option('option', str, required=True),),
), ),
) )
@@ -168,12 +151,7 @@ def test_parse_section_options_should_return_section_options():
config = module.parse_section_options(parser, section_format) config = module.parse_section_options(parser, section_format)
assert config == OrderedDict( assert config == OrderedDict((('foo', 'value'), ('bar', 1)))
(
('foo', 'value'),
('bar', 1),
)
)
def test_parse_section_options_for_missing_section_should_return_empty_dict(): def test_parse_section_options_for_missing_section_should_return_empty_dict():
@@ -210,13 +188,13 @@ def test_parse_configuration_should_return_section_configs():
config_format = (flexmock(name='items'), flexmock(name='things')) config_format = (flexmock(name='items'), flexmock(name='things'))
mock_module = flexmock(module) mock_module = flexmock(module)
mock_module.should_receive('validate_configuration_format').with_args( mock_module.should_receive('validate_configuration_format').with_args(
parser, config_format, parser, config_format
).once() ).once()
mock_section_configs = (flexmock(), flexmock()) mock_section_configs = (flexmock(), flexmock())
for section_format, section_config in zip(config_format, mock_section_configs): for section_format, section_config in zip(config_format, mock_section_configs):
mock_module.should_receive('parse_section_options').with_args( mock_module.should_receive('parse_section_options').with_args(
parser, section_format, parser, section_format
).and_return(section_config).once() ).and_return(section_config).once()
parsed_config = module.parse_configuration('filename', config_format) parsed_config = module.parse_configuration('filename', config_format)


@@ -24,6 +24,7 @@ def test_apply_logical_validation_raises_if_archive_name_format_present_without_
}, },
) )
def test_apply_logical_validation_raises_if_archive_name_format_present_without_retention_prefix(): def test_apply_logical_validation_raises_if_archive_name_format_present_without_retention_prefix():
with pytest.raises(module.Validation_error): with pytest.raises(module.Validation_error):
module.apply_logical_validation( module.apply_logical_validation(
@@ -31,7 +32,7 @@ def test_apply_logical_validation_raises_if_archive_name_format_present_without_
{ {
'storage': {'archive_name_format': '{hostname}-{now}'}, 'storage': {'archive_name_format': '{hostname}-{now}'},
'retention': {'keep_daily': 7}, 'retention': {'keep_daily': 7},
'consistency': {'prefix': '{hostname}-'} 'consistency': {'prefix': '{hostname}-'},
}, },
) )
@@ -59,15 +60,10 @@ def test_apply_logical_validation_does_not_raise_or_warn_if_archive_name_format_
{ {
'storage': {'archive_name_format': '{hostname}-{now}'}, 'storage': {'archive_name_format': '{hostname}-{now}'},
'retention': {'prefix': '{hostname}-'}, 'retention': {'prefix': '{hostname}-'},
'consistency': {'prefix': '{hostname}-'} 'consistency': {'prefix': '{hostname}-'},
}, },
) )
def test_apply_logical_validation_does_not_raise_otherwise(): def test_apply_logical_validation_does_not_raise_otherwise():
module.apply_logical_validation( module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}})
'config.yaml',
{
'retention': {'keep_secondly': 1000},
},
)


@@ -4,6 +4,7 @@ from flexmock import flexmock
from borgmatic import verbosity as module from borgmatic import verbosity as module
def insert_logging_mock(log_level): def insert_logging_mock(log_level):
""" Mocks the isEnabledFor from python logging. """ """ Mocks the isEnabledFor from python logging. """
logging = flexmock(module.logging.Logger) logging = flexmock(module.logging.Logger)


@@ -1,7 +1,7 @@
from setuptools import setup, find_packages from setuptools import setup, find_packages
VERSION = '1.2.6' VERSION = '1.2.7.dev0'
setup( setup(
@@ -28,17 +28,8 @@ setup(
'generate-borgmatic-config = borgmatic.commands.generate_config:main', 'generate-borgmatic-config = borgmatic.commands.generate_config:main',
] ]
}, },
obsoletes=[ obsoletes=['atticmatic'],
'atticmatic', install_requires=('pykwalify>=1.6.0,<14.06', 'ruamel.yaml>0.15.0,<0.16.0', 'setuptools'),
], tests_require=('flexmock', 'pytest'),
install_requires=(
'pykwalify>=1.6.0,<14.06',
'ruamel.yaml>0.15.0,<0.16.0',
'setuptools',
),
tests_require=(
'flexmock',
'pytest',
),
include_package_data=True, include_package_data=True,
) )


@@ -1,3 +1,4 @@
black==18.9b0
flexmock==0.10.2 flexmock==0.10.2
pykwalify==1.6.1 pykwalify==1.6.1
pytest==3.8.1 pytest==3.8.1


@@ -5,4 +5,11 @@ skipsdist=True
[testenv] [testenv]
usedevelop=True usedevelop=True
deps=-rtest_requirements.txt deps=-rtest_requirements.txt
commands = py.test --cov-report term-missing:skip-covered --cov=borgmatic borgmatic [] commands =
py.test --cov-report term-missing:skip-covered --cov=borgmatic borgmatic []
black --skip-string-normalization --line-length 100 --check .
[testenv:black]
basepython=python3.7
commands =
black --skip-string-normalization --line-length 100 .
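
For readers unfamiliar with these Black flags, here is a minimal, hypothetical sketch of the style that `black --skip-string-normalization --line-length 100` enforces across the test modules above. It is not part of this commit, and the helper function is made up for illustration: calls are collapsed onto a single line whenever they fit within 100 columns, and existing single-quoted strings are left alone rather than being rewritten to double quotes.

```python
# Hypothetical illustration only; this helper is not borgmatic code.

# Hand-wrapped style that Black rewrites when the call fits within 100 columns:
#
#     archive = describe_archive(
#         repository='repo',
#         archive_name_format='Documents_{hostname}-{now}',
#     )


def describe_archive(repository, archive_name_format):
    # Single-quoted strings stay single-quoted because string normalization is skipped.
    return '{}::{}'.format(repository, archive_name_format)


# The Black-formatted equivalent fits on one line under the 100-column limit:
archive = describe_archive(repository='repo', archive_name_format='Documents_{hostname}-{now}')
print(archive)  # repo::Documents_{hostname}-{now}
```

This mirrors the changes visible in the diffs above, where hand-wrapped multi-line keyword-argument calls in the tests were joined into single lines by Black.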