Reduce the default consistency check frequency and support configuring the frequency independently for each check (#523).
parent 8ddb7268eb, commit e76bfa555f
16 changed files with 620 additions and 127 deletions
NEWS | 7

@@ -1,6 +1,9 @@
 1.6.2.dev0
-* #536: Fix generate-borgmatic-config with "--source" flag to support more complex schema changes
-  like the new Healthchecks configuration options.
+* #523: Reduce the default consistency check frequency and support configuring the frequency
+  independently for each check. See the documentation for more information:
+  https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-frequency
+* #536: Fix generate-borgmatic-config to support more complex schema changes like the new
+  Healthchecks configuration options when the "--source" flag is used.
 * Add Bash completion script so you can tab-complete the borgmatic command-line. See the
   documentation for more information:
   https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion
@@ -37,8 +37,9 @@ retention:
 consistency:
     # List of checks to run to validate your backups.
     checks:
-        - repository
-        - archives
+        - name: repository
+        - name: archives
+          frequency: 2 weeks
 
 hooks:
     # Custom preparation scripts to run.
@@ -1,46 +1,152 @@
+import argparse
+import datetime
+import json
 import logging
+import os
+import pathlib
 
-from borgmatic.borg import extract
+from borgmatic.borg import extract, info, state
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command
 
-DEFAULT_CHECKS = ('repository', 'archives')
+DEFAULT_CHECKS = (
+    {'name': 'repository', 'frequency': '2 weeks'},
+    {'name': 'archives', 'frequency': '2 weeks'},
+)
 DEFAULT_PREFIX = '{hostname}-'
 
 
 logger = logging.getLogger(__name__)
 
 
-def _parse_checks(consistency_config, only_checks=None):
+def parse_checks(consistency_config, only_checks=None):
     '''
-    Given a consistency config with a "checks" list, and an optional list of override checks,
-    transform them a tuple of named checks to run.
+    Given a consistency config with a "checks" sequence of dicts and an optional list of override
+    checks, return a tuple of named checks to run.
 
     For example, given a retention config of:
 
-        {'checks': ['repository', 'archives']}
+        {'checks': ({'name': 'repository'}, {'name': 'archives'})}
 
     This will be returned as:
 
         ('repository', 'archives')
 
-    If no "checks" option is present in the config, return the DEFAULT_CHECKS. If the checks value
-    is the string "disabled", return an empty tuple, meaning that no checks should be run.
+    If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value
+    has a name of "disabled", return an empty tuple, meaning that no checks should be run.
 
-    If the "data" option is present, then make sure the "archives" option is included as well.
+    If the "data" check is present, then make sure the "archives" check is included as well.
     '''
-    checks = [
-        check.lower() for check in (only_checks or consistency_config.get('checks', []) or [])
-    ]
-    if checks == ['disabled']:
+    checks = only_checks or tuple(
+        check_config['name']
+        for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS)
+    )
+    checks = tuple(check.lower() for check in checks)
+
+    if 'disabled' in checks:
+        if len(checks) > 1:
+            logger.warning(
+                'Multiple checks are configured, but one of them is "disabled"; not running any checks'
+            )
         return ()
 
     if 'data' in checks and 'archives' not in checks:
-        checks.append('archives')
+        return checks + ('archives',)
 
-    return tuple(check for check in checks if check not in ('disabled', '')) or DEFAULT_CHECKS
+    return checks
 
 
-def _make_check_flags(checks, check_last=None, prefix=None):
+def parse_frequency(frequency):
+    '''
+    Given a frequency string with a number and a unit of time, return a corresponding
+    datetime.timedelta instance or None if the frequency is None or "always".
+
+    For instance, given "3 weeks", return datetime.timedelta(weeks=3)
+
+    Raise ValueError if the given frequency cannot be parsed.
+    '''
+    if not frequency:
+        return None
+
+    frequency = frequency.strip().lower()
+
+    if frequency == 'always':
+        return None
+
+    try:
+        number, time_unit = frequency.split(' ')
+        number = int(number)
+    except ValueError:
+        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
+
+    if not time_unit.endswith('s'):
+        time_unit += 's'
+
+    if time_unit == 'months':
+        number *= 4
+        time_unit = 'weeks'
+    elif time_unit == 'years':
+        number *= 365
+        time_unit = 'days'
+
+    try:
+        return datetime.timedelta(**{time_unit: number})
+    except TypeError:
+        raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
+
+
+def filter_checks_on_frequency(location_config, consistency_config, borg_repository_id, checks):
+    '''
+    Given a location config, a consistency config with a "checks" sequence of dicts, a Borg
+    repository ID, and sequence of checks, filter down those checks based on the configured
+    "frequency" for each check as compared to its check time file.
+
+    In other words, a check whose check time file's timestamp is too new (based on the configured
+    frequency) will get cut from the returned sequence of checks. Example:
+
+    consistency_config = {
+        'checks': [
+            {
+                'name': 'archives',
+                'frequency': '2 weeks',
+            },
+        ]
+    }
+
+    When this function is called with that consistency_config and "archives" in checks, "archives"
+    will get filtered out of the returned result if its check time file is newer than 2 weeks old,
+    indicating that it's not yet time to run that check again.
+
+    Raise ValueError if a frequency cannot be parsed.
+    '''
+    filtered_checks = list(checks)
+
+    for check_config in consistency_config.get('checks', DEFAULT_CHECKS):
+        check = check_config['name']
+        if checks and check not in checks:
+            continue
+
+        frequency_delta = parse_frequency(check_config.get('frequency'))
+        if not frequency_delta:
+            continue
+
+        check_time = read_check_time(
+            make_check_time_path(location_config, borg_repository_id, check)
+        )
+        if not check_time:
+            continue
+
+        # If we've not yet reached the time when the frequency dictates we're ready for another
+        # check, skip this check.
+        if datetime.datetime.now() < check_time + frequency_delta:
+            remaining = check_time + frequency_delta - datetime.datetime.now()
+            logger.info(
+                f"Skipping {check} check due to configured frequency; {remaining} until next check"
+            )
+            filtered_checks.remove(check)
+
+    return tuple(filtered_checks)
+
+
+def make_check_flags(checks, check_last=None, prefix=None):
     '''
     Given a parsed sequence of checks, transform it into tuple of command-line flags.
@@ -66,27 +172,67 @@ def _make_check_flags(checks, check_last=None, prefix=None):
         last_flags = ()
         prefix_flags = ()
         if check_last:
-            logger.warning(
-                'Ignoring check_last option, as "archives" is not in consistency checks.'
-            )
+            logger.info('Ignoring check_last option, as "archives" is not in consistency checks')
         if prefix:
-            logger.warning(
-                'Ignoring consistency prefix option, as "archives" is not in consistency checks.'
+            logger.info(
+                'Ignoring consistency prefix option, as "archives" is not in consistency checks'
             )
 
     common_flags = last_flags + prefix_flags + (('--verify-data',) if 'data' in checks else ())
 
-    if set(DEFAULT_CHECKS).issubset(set(checks)):
+    if {'repository', 'archives'}.issubset(set(checks)):
         return common_flags
 
     return (
-        tuple('--{}-only'.format(check) for check in checks if check in DEFAULT_CHECKS)
+        tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives'))
        + common_flags
     )
 
 
+def make_check_time_path(location_config, borg_repository_id, check_type):
+    '''
+    Given a location configuration dict, a Borg repository ID, and the name of a check type
+    ("repository", "archives", etc.), return a path for recording that check's time (the time of
+    that check last occurring).
+    '''
+    return os.path.join(
+        os.path.expanduser(
+            location_config.get(
+                'borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+            )
+        ),
+        'checks',
+        borg_repository_id,
+        check_type,
+    )
+
+
+def write_check_time(path):  # pragma: no cover
+    '''
+    Record a check time of now as the modification time of the given path.
+    '''
+    logger.debug(f'Writing check time at {path}')
+
+    os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True)
+    pathlib.Path(path, mode=0o600).touch()
+
+
+def read_check_time(path):
+    '''
+    Return the check time based on the modification time of the given path. Return None if the path
+    doesn't exist.
+    '''
+    logger.debug(f'Reading check time from {path}')
+
+    try:
+        return datetime.datetime.fromtimestamp(os.stat(path).st_mtime)
+    except FileNotFoundError:
+        return None
+
+
 def check_archives(
     repository,
+    location_config,
     storage_config,
     consistency_config,
     local_path='borg',
@@ -102,13 +248,33 @@ def check_archives(
     Borg archives for consistency.
 
     If there are no consistency checks to run, skip running them.
+
+    Raises ValueError if the Borg repository ID cannot be determined.
     '''
-    checks = _parse_checks(consistency_config, only_checks)
+    try:
+        borg_repository_id = json.loads(
+            info.display_archives_info(
+                repository,
+                storage_config,
+                argparse.Namespace(json=True, archive=None),
+                local_path,
+                remote_path,
+            )
+        )['repository']['id']
+    except (json.JSONDecodeError, KeyError):
+        raise ValueError(f'Cannot determine Borg repository ID for {repository}')
+
+    checks = filter_checks_on_frequency(
+        location_config,
+        consistency_config,
+        borg_repository_id,
+        parse_checks(consistency_config, only_checks),
+    )
     check_last = consistency_config.get('check_last', None)
     lock_wait = None
     extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '')
 
-    if set(checks).intersection(set(DEFAULT_CHECKS + ('data',))):
+    if set(checks).intersection({'repository', 'archives', 'data'}):
         lock_wait = storage_config.get('lock_wait', None)
 
         verbosity_flags = ()
@@ -122,7 +288,7 @@ def check_archives(
         full_command = (
             (local_path, 'check')
             + (('--repair',) if repair else ())
-            + _make_check_flags(checks, check_last, prefix)
+            + make_check_flags(checks, check_last, prefix)
             + (('--remote-path', remote_path) if remote_path else ())
             + (('--lock-wait', str(lock_wait)) if lock_wait else ())
             + verbosity_flags
@@ -131,12 +297,16 @@ def check_archives(
             + (repository,)
         )
 
-        # The Borg repair option trigger an interactive prompt, which won't work when output is
+        # The Borg repair option triggers an interactive prompt, which won't work when output is
         # captured. And progress messes with the terminal directly.
         if repair or progress:
            execute_command(full_command, output_file=DO_NOT_CAPTURE)
         else:
            execute_command(full_command)
 
+    for check in checks:
+        write_check_time(make_check_time_path(location_config, borg_repository_id, check))
+
     if 'extract' in checks:
         extract.extract_last_archive_dry_run(repository, lock_wait, local_path, remote_path)
+        write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract'))
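For readers following along, here is a condensed, self-contained sketch (not borgmatic code; the marker file path is a temporary stand-in for `~/.borgmatic/checks/<repository ID>/<check name>`) of the skip decision the new functions above implement:

```python
# Condensed sketch of the mechanism added above: a check's last run time is simply the
# mtime of a marker file, and the check is skipped while "now" is still inside the
# configured frequency window. The marker path here is a temporary stand-in.
import datetime
import os
import tempfile

frequency = datetime.timedelta(weeks=2)  # e.g. what parse_frequency('2 weeks') returns

check_time_path = os.path.join(tempfile.mkdtemp(), 'archives')
open(check_time_path, 'w').close()  # like write_check_time(): just touch the marker file

check_time = datetime.datetime.fromtimestamp(os.stat(check_time_path).st_mtime)
next_due = check_time + frequency

if datetime.datetime.now() < next_due:
    remaining = next_due - datetime.datetime.now()
    print(f'Skipping archives check due to configured frequency; {remaining} until next check')
else:
    print('Running archives check')
```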
@@ -5,7 +5,7 @@ import os
 import pathlib
 import tempfile
 
-from borgmatic.borg import feature
+from borgmatic.borg import feature, state
 from borgmatic.execute import DO_NOT_CAPTURE, execute_command, execute_command_with_processes
 
 logger = logging.getLogger(__name__)
@@ -175,7 +175,7 @@ def make_exclude_flags(location_config, exclude_filename=None):
     )
 
 
-DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
+DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
 
 
 def borgmatic_source_directories(borgmatic_source_directory):
@@ -183,7 +183,7 @@ def borgmatic_source_directories(borgmatic_source_directory):
     Return a list of borgmatic-specific source directories used for state like database backups.
     '''
     if not borgmatic_source_directory:
-        borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+        borgmatic_source_directory = state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
 
     return (
         [borgmatic_source_directory]
@@ -192,9 +192,6 @@ def borgmatic_source_directories(borgmatic_source_directory):
     )
 
 
-DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
-
-
 def create_archive(
     dry_run,
     repository,
borgmatic/borg/state.py | 1 (new file)

@@ -0,0 +1 @@
+DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic'
@@ -354,7 +354,7 @@ def make_parsers():
         choices=('repository', 'archives', 'data', 'extract'),
         dest='only',
         action='append',
-        help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks; can specify flag multiple times',
+        help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks (subject to configured frequency, can specify flag multiple times)',
     )
     check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit')
@@ -395,6 +395,7 @@ def run_actions(
         logger.info('{}: Running consistency checks'.format(repository))
         borg_check.check_archives(
             repository,
+            location,
             storage,
             consistency,
             local_path=local_path,
@@ -24,3 +24,8 @@ def normalize(config):
     cronhub = config.get('hooks', {}).get('cronhub')
     if isinstance(cronhub, str):
         config['hooks']['cronhub'] = {'ping_url': cronhub}
+
+    # Upgrade consistency checks from a list of strings to a list of dicts.
+    checks = config.get('consistency', {}).get('checks')
+    if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
+        config['consistency']['checks'] = [{'name': check_type} for check_type in checks]
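A minimal stand-alone sketch of what that normalization does to a legacy configuration (the dict literal below is a made-up example; the logic mirrors the lines added above):

```python
# Legacy-style checks (a list of strings) get rewritten into the new list-of-dicts shape.
legacy_config = {'consistency': {'checks': ['repository', 'archives']}}

checks = legacy_config.get('consistency', {}).get('checks')
if isinstance(checks, list) and len(checks) and isinstance(checks[0], str):
    legacy_config['consistency']['checks'] = [{'name': check_type} for check_type in checks]

print(legacy_config['consistency']['checks'])
# [{'name': 'repository'}, {'name': 'archives'}]
```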
@@ -447,6 +447,11 @@ properties:
         checks:
             type: array
             items:
+                type: object
+                required: ['name']
+                additionalProperties: false
+                properties:
+                    name:
                         type: string
                         enum:
                             - repository
@@ -454,19 +459,33 @@ properties:
                             - data
                             - extract
                             - disabled
-            uniqueItems: true
                         description: |
-                List of one or more consistency checks to run: "repository",
-                "archives", "data", and/or "extract". Defaults to
-                "repository" and "archives". Set to "disabled" to disable
-                all consistency checks. "repository" checks the consistency
-                of the repository, "archives" checks all of the archives,
-                "data" verifies the integrity of the data within the
-                archives, and "extract" does an extraction dry-run of the
-                most recent archive. Note that "data" implies "archives".
-            example:
-                - repository
-                - archives
+                            Name of consistency check to run: "repository",
+                            "archives", "data", and/or "extract". Set to
+                            "disabled" to disable all consistency checks.
+                            "repository" checks the consistency of the
+                            repository, "archives" checks all of the
+                            archives, "data" verifies the integrity of the
+                            data within the archives, and "extract" does an
+                            extraction dry-run of the most recent archive.
+                            Note that "data" implies "archives".
+                        example: repository
+                    frequency:
+                        type: string
+                        description: |
+                            How frequently to run this type of consistency
+                            check (as a best effort). The value is a number
+                            followed by a unit of time. E.g., "2 weeks" to
+                            run this consistency check no more than every
+                            two weeks for a given repository or "1 month" to
+                            run it no more than monthly. Defaults to
+                            "always": running this check every time checks
+                            are run.
+                        example: 2 weeks
+            description: |
+                List of one or more consistency checks to run on a periodic
+                basis (if "frequency" is set) or every time borgmatic runs
+                checks (if "frequency" is omitted).
         check_repositories:
             type: array
             items:
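For reference, a consistency section satisfying this updated schema might look like the following hypothetical example, written here as the equivalent Python data structure:

```python
# Hypothetical configuration matching the new schema above: each check is an object
# with a required "name" and an optional "frequency" string.
consistency = {
    'checks': [
        {'name': 'repository', 'frequency': '2 weeks'},
        {'name': 'archives', 'frequency': '1 month'},
        {'name': 'extract'},  # No frequency given, so this check runs every time ("always").
    ]
}

print(consistency['checks'][0]['name'])  # repository
```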
@@ -2,7 +2,7 @@ import logging
 import os
 import shutil
 
-from borgmatic.borg.create import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY
 
 logger = logging.getLogger(__name__)
@@ -49,7 +49,7 @@ consistency checks with `check` on a much less frequent basis (e.g. with
 
 Another option is to customize your consistency checks. The default
 consistency checks run both full-repository checks and per-archive checks
-within each repository.
+within each repository no more than once every two weeks.
 
 But if you find that archive checks are too slow, for example, you can
 configure borgmatic to run repository checks only. Configure this in the
@@ -58,7 +58,7 @@ configure borgmatic to run repository checks only. Configure this in the
 ```yaml
 consistency:
     checks:
-        - repository
+        - name: repository
 ```
 
 Here are the available checks from fastest to slowest:
@@ -70,6 +70,33 @@ Here are the available checks from fastest to slowest:
 
 See [Borg's check documentation](https://borgbackup.readthedocs.io/en/stable/usage/check.html) for more information.
 
+### Check frequency
+
+You can optionally configure checks to run on a periodic basis rather than
+every time borgmatic runs checks. For instance:
+
+```yaml
+consistency:
+    checks:
+        - name: repository
+          frequency: 2 weeks
+```
+
+This tells borgmatic to run this consistency check at most once every two
+weeks for a given repository. The `frequency` value is a number followed by a
+unit of time, e.g. "3 days", "1 week", "2 months", etc. The `frequency`
+defaults to "always", which means run this check every time checks run.
+
+Unlike a real scheduler like cron, borgmatic only makes a best effort to run
+checks on the configured frequency. It compares that frequency with how long
+it's been since the last check for a given repository (as recorded in a file
+within `~/.borgmatic/checks`). If it hasn't been long enough, the check is
+skipped. And you still have to run `borgmatic check` (or just `borgmatic`) in
+order for checks to run, even when a `frequency` is configured!
+
+
 ### Disabling checks
 
 If that's still too slow, you can disable consistency checks entirely,
 either for a single repository or for all repositories.
@@ -78,7 +105,7 @@ Disabling all consistency checks looks like this:
 ```yaml
 consistency:
     checks:
-        - disabled
+        - name: disabled
 ```
 
 Or, if you have multiple repositories in your borgmatic configuration file,
@@ -99,7 +126,8 @@ borgmatic check --only data --only extract
 ```
 
 This is useful for running slow consistency checks on an infrequent basis,
-separate from your regular checks.
+separate from your regular checks. It is still subject to any configured
+check frequencies.
 
 
 ## Troubleshooting
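As a rough illustration of where those recorded check times live (mirroring make_check_time_path() from this commit; the repository ID below is made up):

```python
# Sketch of the check-time marker path described in the documentation above.
import os

borgmatic_source_directory = '~/.borgmatic'
borg_repository_id = 'abc123'  # hypothetical; borgmatic uses the real Borg repository ID
check_type = 'archives'

check_time_path = os.path.join(
    os.path.expanduser(borgmatic_source_directory), 'checks', borg_repository_id, check_type
)
print(check_time_path)  # e.g. /home/user/.borgmatic/checks/abc123/archives
```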
setup.py | 2

@@ -30,11 +30,11 @@ setup(
     },
     obsoletes=['atticmatic'],
     install_requires=(
+        'colorama>=0.4.1,<0.5',
         'jsonschema',
         'requests',
         'ruamel.yaml>0.15.0,<0.18.0',
         'setuptools',
-        'colorama>=0.4.1,<0.5',
     ),
     include_package_data=True,
     python_requires='>=3.7',
@@ -55,8 +55,8 @@ def test_parse_configuration_transforms_file_into_mapping():
 
         consistency:
             checks:
-                - repository
-                - archives
+                - name: repository
+                - name: archives
         '''
     )
 
@@ -65,7 +65,7 @@ def test_parse_configuration_transforms_file_into_mapping():
     assert result == {
         'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']},
         'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60},
-        'consistency': {'checks': ['repository', 'archives']},
+        'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]},
     }
|
@ -17,172 +17,317 @@ def insert_execute_command_never():
|
|||
|
||||
|
||||
def test_parse_checks_returns_them_as_tuple():
|
||||
checks = module._parse_checks({'checks': ['foo', 'disabled', 'bar']})
|
||||
checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'bar'}]})
|
||||
|
||||
assert checks == ('foo', 'bar')
|
||||
|
||||
|
||||
def test_parse_checks_with_missing_value_returns_defaults():
|
||||
checks = module._parse_checks({})
|
||||
checks = module.parse_checks({})
|
||||
|
||||
assert checks == module.DEFAULT_CHECKS
|
||||
assert checks == ('repository', 'archives')
|
||||
|
||||
|
||||
def test_parse_checks_with_blank_value_returns_defaults():
|
||||
checks = module._parse_checks({'checks': []})
|
||||
def test_parse_checks_with_empty_list_returns_defaults():
|
||||
checks = module.parse_checks({'checks': []})
|
||||
|
||||
assert checks == module.DEFAULT_CHECKS
|
||||
assert checks == ('repository', 'archives')
|
||||
|
||||
|
||||
def test_parse_checks_with_none_value_returns_defaults():
|
||||
checks = module._parse_checks({'checks': None})
|
||||
checks = module.parse_checks({'checks': None})
|
||||
|
||||
assert checks == module.DEFAULT_CHECKS
|
||||
assert checks == ('repository', 'archives')
|
||||
|
||||
|
||||
def test_parse_checks_with_disabled_returns_no_checks():
|
||||
checks = module._parse_checks({'checks': ['disabled']})
|
||||
checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'disabled'}]})
|
||||
|
||||
assert checks == ()
|
||||
|
||||
|
||||
def test_parse_checks_with_data_check_also_injects_archives():
|
||||
checks = module._parse_checks({'checks': ['data']})
|
||||
checks = module.parse_checks({'checks': [{'name': 'data'}]})
|
||||
|
||||
assert checks == ('data', 'archives')
|
||||
|
||||
|
||||
def test_parse_checks_with_data_check_passes_through_archives():
|
||||
checks = module._parse_checks({'checks': ['data', 'archives']})
|
||||
checks = module.parse_checks({'checks': [{'name': 'data'}, {'name': 'archives'}]})
|
||||
|
||||
assert checks == ('data', 'archives')
|
||||
|
||||
|
||||
def test_parse_checks_prefers_override_checks_to_configured_checks():
|
||||
checks = module._parse_checks({'checks': ['archives']}, only_checks=['repository', 'extract'])
|
||||
checks = module.parse_checks(
|
||||
{'checks': [{'name': 'archives'}]}, only_checks=['repository', 'extract']
|
||||
)
|
||||
|
||||
assert checks == ('repository', 'extract')
|
||||
|
||||
|
||||
def test_parse_checks_with_override_data_check_also_injects_archives():
|
||||
checks = module._parse_checks({'checks': ['extract']}, only_checks=['data'])
|
||||
checks = module.parse_checks({'checks': [{'name': 'extract'}]}, only_checks=['data'])
|
||||
|
||||
assert checks == ('data', 'archives')
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'frequency,expected_result',
|
||||
(
|
||||
(None, None),
|
||||
('always', None),
|
||||
('1 hour', module.datetime.timedelta(hours=1)),
|
||||
('2 hours', module.datetime.timedelta(hours=2)),
|
||||
('1 day', module.datetime.timedelta(days=1)),
|
||||
('2 days', module.datetime.timedelta(days=2)),
|
||||
('1 week', module.datetime.timedelta(weeks=1)),
|
||||
('2 weeks', module.datetime.timedelta(weeks=2)),
|
||||
('1 month', module.datetime.timedelta(weeks=4)),
|
||||
('2 months', module.datetime.timedelta(weeks=8)),
|
||||
('1 year', module.datetime.timedelta(days=365)),
|
||||
('2 years', module.datetime.timedelta(days=365 * 2)),
|
||||
),
|
||||
)
|
||||
def test_parse_frequency_parses_into_timedeltas(frequency, expected_result):
|
||||
assert module.parse_frequency(frequency) == expected_result
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'frequency', ('sometime', 'x days', '3 decades',),
|
||||
)
|
||||
def test_parse_frequency_raises_on_parse_error(frequency):
|
||||
with pytest.raises(ValueError):
|
||||
module.parse_frequency(frequency)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_without_config_uses_default_checks():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(
|
||||
module.datetime.timedelta(weeks=2)
|
||||
)
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('read_check_time').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
location_config={},
|
||||
consistency_config={},
|
||||
borg_repository_id='repo',
|
||||
checks=('repository', 'archives'),
|
||||
) == ('repository', 'archives')
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_unconfigured_check():
|
||||
assert module.filter_checks_on_frequency(
|
||||
location_config={}, consistency_config={}, borg_repository_id='repo', checks=('data',),
|
||||
) == ('data',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_without_frequency():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
location_config={},
|
||||
consistency_config={'checks': [{'name': 'archives'}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(
|
||||
module.datetime.timedelta(hours=1)
|
||||
)
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('read_check_time').and_return(
|
||||
module.datetime.datetime(year=module.datetime.MINYEAR, month=1, day=1)
|
||||
)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
location_config={},
|
||||
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(
|
||||
module.datetime.timedelta(hours=1)
|
||||
)
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('read_check_time').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
location_config={},
|
||||
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(
|
||||
module.datetime.timedelta(hours=1)
|
||||
)
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('read_check_time').and_return(module.datetime.datetime.now())
|
||||
|
||||
assert (
|
||||
module.filter_checks_on_frequency(
|
||||
location_config={},
|
||||
consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
)
|
||||
== ()
|
||||
)
|
||||
|
||||
|
||||
def test_make_check_flags_with_repository_check_returns_flag():
|
||||
flags = module._make_check_flags(('repository',))
|
||||
flags = module.make_check_flags(('repository',))
|
||||
|
||||
assert flags == ('--repository-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_archives_check_returns_flag():
|
||||
flags = module._make_check_flags(('archives',))
|
||||
flags = module.make_check_flags(('archives',))
|
||||
|
||||
assert flags == ('--archives-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_data_check_returns_flag():
|
||||
flags = module._make_check_flags(('data',))
|
||||
flags = module.make_check_flags(('data',))
|
||||
|
||||
assert flags == ('--verify-data',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_extract_omits_extract_flag():
|
||||
flags = module._make_check_flags(('extract',))
|
||||
flags = module.make_check_flags(('extract',))
|
||||
|
||||
assert flags == ()
|
||||
|
||||
|
||||
def test_make_check_flags_with_default_checks_and_default_prefix_returns_default_flags():
|
||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, prefix=module.DEFAULT_PREFIX)
|
||||
flags = module.make_check_flags(('repository', 'archives'), prefix=module.DEFAULT_PREFIX)
|
||||
|
||||
assert flags == ('--prefix', module.DEFAULT_PREFIX)
|
||||
|
||||
|
||||
def test_make_check_flags_with_all_checks_and_default_prefix_returns_default_flags():
|
||||
flags = module._make_check_flags(
|
||||
module.DEFAULT_CHECKS + ('extract',), prefix=module.DEFAULT_PREFIX
|
||||
flags = module.make_check_flags(
|
||||
('repository', 'archives', 'extract'), prefix=module.DEFAULT_PREFIX
|
||||
)
|
||||
|
||||
assert flags == ('--prefix', module.DEFAULT_PREFIX)
|
||||
|
||||
|
||||
def test_make_check_flags_with_archives_check_and_last_includes_last_flag():
|
||||
flags = module._make_check_flags(('archives',), check_last=3)
|
||||
flags = module.make_check_flags(('archives',), check_last=3)
|
||||
|
||||
assert flags == ('--archives-only', '--last', '3')
|
||||
|
||||
|
||||
def test_make_check_flags_with_repository_check_and_last_omits_last_flag():
|
||||
flags = module._make_check_flags(('repository',), check_last=3)
|
||||
flags = module.make_check_flags(('repository',), check_last=3)
|
||||
|
||||
assert flags == ('--repository-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_default_checks_and_last_includes_last_flag():
|
||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, check_last=3)
|
||||
flags = module.make_check_flags(('repository', 'archives'), check_last=3)
|
||||
|
||||
assert flags == ('--last', '3')
|
||||
|
||||
|
||||
def test_make_check_flags_with_archives_check_and_prefix_includes_prefix_flag():
|
||||
flags = module._make_check_flags(('archives',), prefix='foo-')
|
||||
flags = module.make_check_flags(('archives',), prefix='foo-')
|
||||
|
||||
assert flags == ('--archives-only', '--prefix', 'foo-')
|
||||
|
||||
|
||||
def test_make_check_flags_with_archives_check_and_empty_prefix_omits_prefix_flag():
|
||||
flags = module._make_check_flags(('archives',), prefix='')
|
||||
flags = module.make_check_flags(('archives',), prefix='')
|
||||
|
||||
assert flags == ('--archives-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_archives_check_and_none_prefix_omits_prefix_flag():
|
||||
flags = module._make_check_flags(('archives',), prefix=None)
|
||||
flags = module.make_check_flags(('archives',), prefix=None)
|
||||
|
||||
assert flags == ('--archives-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_repository_check_and_prefix_omits_prefix_flag():
|
||||
flags = module._make_check_flags(('repository',), prefix='foo-')
|
||||
flags = module.make_check_flags(('repository',), prefix='foo-')
|
||||
|
||||
assert flags == ('--repository-only',)
|
||||
|
||||
|
||||
def test_make_check_flags_with_default_checks_and_prefix_includes_prefix_flag():
|
||||
flags = module._make_check_flags(module.DEFAULT_CHECKS, prefix='foo-')
|
||||
flags = module.make_check_flags(('repository', 'archives'), prefix='foo-')
|
||||
|
||||
assert flags == ('--prefix', 'foo-')
|
||||
|
||||
|
||||
def test_read_check_time_does_not_raise():
|
||||
flexmock(module.os).should_receive('stat').and_return(flexmock(st_mtime=123))
|
||||
|
||||
assert module.read_check_time('/path')
|
||||
|
||||
|
||||
def test_read_check_time_on_missing_file_does_not_raise():
|
||||
flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError)
|
||||
|
||||
assert module.read_check_time('/path') is None
|
||||
|
||||
|
||||
def test_check_archives_with_progress_calls_borg_with_progress_parameter():
|
||||
checks = ('repository',)
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'check', '--progress', 'repo'), output_file=module.DO_NOT_CAPTURE
|
||||
).once()
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config, progress=True
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
progress=True,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_repair_calls_borg_with_repair_parameter():
|
||||
checks = ('repository',)
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'check', '--repair', 'repo'), output_file=module.DO_NOT_CAPTURE
|
||||
).once()
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config, repair=True
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
repair=True,
|
||||
)
|
||||
|
||||
|
||||
|
@ -198,14 +343,59 @@ def test_check_archives_with_repair_calls_borg_with_repair_parameter():
|
|||
def test_check_archives_calls_borg_with_parameters(checks):
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').with_args(
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').with_args(
|
||||
checks, check_last, module.DEFAULT_PREFIX
|
||||
).and_return(())
|
||||
insert_execute_command_mock(('borg', 'check', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_json_error_raises():
|
||||
checks = ('archives',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"unexpected": {"id": "repo"}}'
|
||||
)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
module.check_archives(
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_missing_json_keys_raises():
|
||||
checks = ('archives',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return('{invalid JSON')
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
module.check_archives(
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
|
@ -213,49 +403,82 @@ def test_check_archives_with_extract_check_calls_extract_only():
|
|||
checks = ('extract',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').never()
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').never()
|
||||
flexmock(module.extract).should_receive('extract_last_archive_dry_run').once()
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
insert_execute_command_never()
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_log_info_calls_borg_with_info_parameter():
|
||||
checks = ('repository',)
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
insert_logging_mock(logging.INFO)
|
||||
insert_execute_command_mock(('borg', 'check', '--info', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
|
||||
checks = ('repository',)
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
insert_logging_mock(logging.DEBUG)
|
||||
insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_without_any_checks_bails():
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(())
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
insert_execute_command_never()
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
|
@ -263,14 +486,21 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
|
|||
checks = ('repository',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').with_args(
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').with_args(
|
||||
checks, check_last, module.DEFAULT_PREFIX
|
||||
).and_return(())
|
||||
insert_execute_command_mock(('borg1', 'check', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
local_path='borg1',
|
||||
|
@ -281,14 +511,21 @@ def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(
|
|||
checks = ('repository',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').with_args(
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').with_args(
|
||||
checks, check_last, module.DEFAULT_PREFIX
|
||||
).and_return(())
|
||||
insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
remote_path='borg1',
|
||||
|
@ -299,14 +536,23 @@ def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters():
|
|||
checks = ('repository',)
|
||||
check_last = flexmock()
|
||||
consistency_config = {'check_last': check_last}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').with_args(
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').with_args(
|
||||
checks, check_last, module.DEFAULT_PREFIX
|
||||
).and_return(())
|
||||
insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={'lock_wait': 5}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={'lock_wait': 5},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
|
@ -315,26 +561,42 @@ def test_check_archives_with_retention_prefix():
|
|||
check_last = flexmock()
|
||||
prefix = 'foo-'
|
||||
consistency_config = {'check_last': check_last, 'prefix': prefix}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').with_args(
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').with_args(
|
||||
checks, check_last, prefix
|
||||
).and_return(())
|
||||
insert_execute_command_mock(('borg', 'check', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo', storage_config={}, consistency_config=consistency_config
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_extra_borg_options_calls_borg_with_extra_options():
|
||||
checks = ('repository',)
|
||||
consistency_config = {'check_last': None}
|
||||
flexmock(module).should_receive('_parse_checks').and_return(checks)
|
||||
flexmock(module).should_receive('_make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('parse_checks')
|
||||
flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
|
||||
flexmock(module.info).should_receive('display_archives_info').and_return(
|
||||
'{"repository": {"id": "repo"}}'
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))
|
||||
flexmock(module).should_receive('make_check_time_path')
|
||||
flexmock(module).should_receive('write_check_time')
|
||||
|
||||
module.check_archives(
|
||||
repository='repo',
|
||||
location_config={},
|
||||
storage_config={'extra_borg_options': {'check': '--extra --options'}},
|
||||
consistency_config=consistency_config,
|
||||
)
|
||||
|
|
@@ -271,7 +271,9 @@ def test_borgmatic_source_directories_defaults_when_directory_not_given():
     flexmock(module.os.path).should_receive('exists').and_return(True)
     flexmock(module.os.path).should_receive('expanduser')
 
-    assert module.borgmatic_source_directories(None) == [module.DEFAULT_BORGMATIC_SOURCE_DIRECTORY]
+    assert module.borgmatic_source_directories(None) == [
+        module.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY
+    ]
 
 
 DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}'
@@ -35,6 +35,10 @@ from borgmatic.config import normalize as module
             {'hooks': {'cronhub': 'https://example.com'}},
             {'hooks': {'cronhub': {'ping_url': 'https://example.com'}}},
         ),
+        (
+            {'consistency': {'checks': ['archives']}},
+            {'consistency': {'checks': [{'name': 'archives'}]}},
+        ),
     ),
 )
 def test_normalize_applies_hard_coded_normalization_to_config(config, expected_config):