Compare commits
No commits in common. "main" and "addUptimeKumaTests" have entirely different histories.
main
...
addUptimeKumaTests
21 changed files with 231 additions and 687 deletions
16
NEWS
16
NEWS
|
@ -1,21 +1,7 @@
|
|||
1.8.13.dev0
|
||||
* #785: Add an "only_run_on" option to consistency checks so you can limit a check to running on
|
||||
particular days of the week. See the documentation for more information:
|
||||
https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-days
|
||||
* #885: Add Uptime Kuma monitoring hook. See the documentation for more information:
|
||||
https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#uptime-kuma-hook
|
||||
* #886: Fix a PagerDuty hook traceback with Python < 3.10.
|
||||
* #889: Fix the Healthchecks ping body size limit, restoring it to the documented 100,000 bytes.
|
||||
|
||||
1.8.12
|
||||
* #817: Add a "--max-duration" flag to the "check" action and a "max_duration" option to the
|
||||
repository check configuration. This tells Borg to interrupt a repository check after a certain
|
||||
duration.
|
||||
1.8.12.dev0
|
||||
* #860: Fix interaction between environment variable interpolation in constants and shell escaping.
|
||||
* #863: When color output is disabled (explicitly or implicitly), don't prefix each log line with
|
||||
the log level.
|
||||
* #865: Add an "upload_buffer_size" option to set the size of the upload buffer used in "create"
|
||||
action.
|
||||
* #866: Fix "Argument list too long" error in the "spot" check when checking hundreds of thousands
|
||||
of files at once.
|
||||
* #874: Add the configured repository label as "repository_label" to the interpolated variables
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
import calendar
|
||||
import datetime
|
||||
import hashlib
|
||||
import itertools
|
||||
|
@ -100,17 +99,12 @@ def parse_frequency(frequency):
|
|||
raise ValueError(f"Could not parse consistency check frequency '{frequency}'")
|
||||
|
||||
|
||||
WEEKDAY_DAYS = calendar.day_name[0:5]
|
||||
WEEKEND_DAYS = calendar.day_name[5:7]
|
||||
|
||||
|
||||
def filter_checks_on_frequency(
|
||||
config,
|
||||
borg_repository_id,
|
||||
checks,
|
||||
force,
|
||||
archives_check_id=None,
|
||||
datetime_now=datetime.datetime.now,
|
||||
):
|
||||
'''
|
||||
Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence
|
||||
|
@ -149,29 +143,6 @@ def filter_checks_on_frequency(
|
|||
if checks and check not in checks:
|
||||
continue
|
||||
|
||||
only_run_on = check_config.get('only_run_on')
|
||||
if only_run_on:
|
||||
# Use a dict instead of a set to preserve ordering.
|
||||
days = dict.fromkeys(only_run_on)
|
||||
|
||||
if 'weekday' in days:
|
||||
days = {
|
||||
**dict.fromkeys(day for day in days if day != 'weekday'),
|
||||
**dict.fromkeys(WEEKDAY_DAYS),
|
||||
}
|
||||
if 'weekend' in days:
|
||||
days = {
|
||||
**dict.fromkeys(day for day in days if day != 'weekend'),
|
||||
**dict.fromkeys(WEEKEND_DAYS),
|
||||
}
|
||||
|
||||
if calendar.day_name[datetime_now().weekday()] not in days:
|
||||
logger.info(
|
||||
f"Skipping {check} check due to day of the week; check only runs on {'/'.join(days)} (use --force to check anyway)"
|
||||
)
|
||||
filtered_checks.remove(check)
|
||||
continue
|
||||
|
||||
frequency_delta = parse_frequency(check_config.get('frequency'))
|
||||
if not frequency_delta:
|
||||
continue
|
||||
|
@ -182,8 +153,8 @@ def filter_checks_on_frequency(
|
|||
|
||||
# If we've not yet reached the time when the frequency dictates we're ready for another
|
||||
# check, skip this check.
|
||||
if datetime_now() < check_time + frequency_delta:
|
||||
remaining = check_time + frequency_delta - datetime_now()
|
||||
if datetime.datetime.now() < check_time + frequency_delta:
|
||||
remaining = check_time + frequency_delta - datetime.datetime.now()
|
||||
logger.info(
|
||||
f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)'
|
||||
)
|
||||
|
|
|
@ -50,10 +50,10 @@ def make_archive_filter_flags(local_borg_version, config, checks, check_argument
|
|||
return ()
|
||||
|
||||
|
||||
def make_check_name_flags(checks, archive_filter_flags):
|
||||
def make_check_flags(checks, archive_filter_flags):
|
||||
'''
|
||||
Given parsed checks set and a sequence of flags to filter archives, transform the checks into
|
||||
tuple of command-line check flags.
|
||||
Given a parsed checks set and a sequence of flags to filter archives,
|
||||
transform the checks into tuple of command-line check flags.
|
||||
|
||||
For example, given parsed checks of:
|
||||
|
||||
|
@ -134,30 +134,10 @@ def check_archives(
|
|||
if logger.isEnabledFor(logging.DEBUG):
|
||||
verbosity_flags = ('--debug', '--show-rc')
|
||||
|
||||
try:
|
||||
repository_check_config = next(
|
||||
check for check in config.get('checks', ()) if check.get('name') == 'repository'
|
||||
)
|
||||
except StopIteration:
|
||||
repository_check_config = {}
|
||||
|
||||
if check_arguments.max_duration and 'archives' in checks:
|
||||
raise ValueError('The archives check cannot run when the --max-duration flag is used')
|
||||
if repository_check_config.get('max_duration') and 'archives' in checks:
|
||||
raise ValueError(
|
||||
'The archives check cannot run when the repository check has the max_duration option set'
|
||||
)
|
||||
|
||||
max_duration = check_arguments.max_duration or repository_check_config.get('max_duration')
|
||||
|
||||
borg_environment = environment.make_environment(config)
|
||||
borg_exit_codes = config.get('borg_exit_codes')
|
||||
|
||||
full_command = (
|
||||
(local_path, 'check')
|
||||
+ (('--repair',) if check_arguments.repair else ())
|
||||
+ (('--max-duration', str(max_duration)) if max_duration else ())
|
||||
+ make_check_name_flags(checks, archive_filter_flags)
|
||||
+ make_check_flags(checks, archive_filter_flags)
|
||||
+ (('--remote-path', remote_path) if remote_path else ())
|
||||
+ (('--log-json',) if global_arguments.log_json else ())
|
||||
+ (('--lock-wait', str(lock_wait)) if lock_wait else ())
|
||||
|
@ -167,6 +147,9 @@ def check_archives(
|
|||
+ flags.make_repository_flags(repository_path, local_borg_version)
|
||||
)
|
||||
|
||||
borg_environment = environment.make_environment(config)
|
||||
borg_exit_codes = config.get('borg_exit_codes')
|
||||
|
||||
# The Borg repair option triggers an interactive prompt, which won't work when output is
|
||||
# captured. And progress messes with the terminal directly.
|
||||
if check_arguments.repair or check_arguments.progress:
|
||||
|
|
|
@ -371,7 +371,6 @@ def make_base_create_command(
|
|||
chunker_params = config.get('chunker_params', None)
|
||||
compression = config.get('compression', None)
|
||||
upload_rate_limit = config.get('upload_rate_limit', None)
|
||||
upload_buffer_size = config.get('upload_buffer_size', None)
|
||||
umask = config.get('umask', None)
|
||||
lock_wait = config.get('lock_wait', None)
|
||||
list_filter_flags = make_list_filter_flags(local_borg_version, dry_run)
|
||||
|
@ -413,7 +412,6 @@ def make_base_create_command(
|
|||
+ (('--chunker-params', chunker_params) if chunker_params else ())
|
||||
+ (('--compression', compression) if compression else ())
|
||||
+ upload_ratelimit_flags
|
||||
+ (('--upload-buffer', str(upload_buffer_size)) if upload_buffer_size else ())
|
||||
+ (('--one-file-system',) if config.get('one_file_system') or stream_processes else ())
|
||||
+ numeric_ids_flags
|
||||
+ atime_flags
|
||||
|
|
|
@ -661,11 +661,6 @@ def make_parsers():
|
|||
action='store_true',
|
||||
help='Attempt to repair any inconsistencies found (for interactive use)',
|
||||
)
|
||||
check_group.add_argument(
|
||||
'--max-duration',
|
||||
metavar='SECONDS',
|
||||
help='How long to check the repository before interrupting the check, defaults to no interruption',
|
||||
)
|
||||
check_group.add_argument(
|
||||
'-a',
|
||||
'--match-archives',
|
||||
|
|
|
@ -280,11 +280,6 @@ properties:
|
|||
Remote network upload rate limit in kiBytes/second. Defaults to
|
||||
unlimited.
|
||||
example: 100
|
||||
upload_buffer_size:
|
||||
type: integer
|
||||
description: |
|
||||
Size of network upload buffer in MiB. Defaults to no buffer.
|
||||
example: 160
|
||||
retries:
|
||||
type: integer
|
||||
description: |
|
||||
|
@ -516,6 +511,7 @@ properties:
|
|||
name:
|
||||
type: string
|
||||
enum:
|
||||
- repository
|
||||
- archives
|
||||
- data
|
||||
- extract
|
||||
|
@ -546,78 +542,6 @@ properties:
|
|||
"always": running this check every time checks
|
||||
are run.
|
||||
example: 2 weeks
|
||||
only_run_on:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: |
|
||||
After the "frequency" duration has elapsed, only
|
||||
run this check if the current day of the week
|
||||
matches one of these values (the name of a day of
|
||||
the week in the current locale). "weekday" and
|
||||
"weekend" are also accepted. Defaults to running
|
||||
the check on any day of the week.
|
||||
example:
|
||||
- Saturday
|
||||
- Sunday
|
||||
- required: [name]
|
||||
additionalProperties: false
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
enum:
|
||||
- repository
|
||||
description: |
|
||||
Name of consistency check to run: "repository",
|
||||
"archives", "data", "spot", and/or "extract".
|
||||
"repository" checks the consistency of the
|
||||
repository, "archives" checks all of the
|
||||
archives, "data" verifies the integrity of the
|
||||
data within the archives, "spot" checks that
|
||||
some percentage of source files are found in the
|
||||
most recent archive (with identical contents),
|
||||
and "extract" does an extraction dry-run of the
|
||||
most recent archive. Note that "data" implies
|
||||
"archives". See "skip_actions" for disabling
|
||||
checks altogether.
|
||||
example: spot
|
||||
frequency:
|
||||
type: string
|
||||
description: |
|
||||
How frequently to run this type of consistency
|
||||
check (as a best effort). The value is a number
|
||||
followed by a unit of time. E.g., "2 weeks" to
|
||||
run this consistency check no more than every
|
||||
two weeks for a given repository or "1 month" to
|
||||
run it no more than monthly. Defaults to
|
||||
"always": running this check every time checks
|
||||
are run.
|
||||
example: 2 weeks
|
||||
only_run_on:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: |
|
||||
After the "frequency" duration has elapsed, only
|
||||
run this check if the current day of the week
|
||||
matches one of these values (the name of a day of
|
||||
the week in the current locale). "weekday" and
|
||||
"weekend" are also accepted. Defaults to running
|
||||
the check on any day of the week.
|
||||
example:
|
||||
- Saturday
|
||||
- Sunday
|
||||
max_duration:
|
||||
type: integer
|
||||
description: |
|
||||
How many seconds to check the repository before
|
||||
interrupting the check. Useful for splitting a
|
||||
long-running repository check into multiple
|
||||
partial checks. Defaults to no interruption. Only
|
||||
applies to the "repository" check, does not check
|
||||
the repository index, and is not compatible with a
|
||||
simultaneous "archives" check or "--repair" flag.
|
||||
example: 3600
|
||||
- required:
|
||||
- name
|
||||
- count_tolerance_percentage
|
||||
|
@ -655,20 +579,6 @@ properties:
|
|||
"always": running this check every time checks
|
||||
are run.
|
||||
example: 2 weeks
|
||||
only_run_on:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
description: |
|
||||
After the "frequency" duration has elapsed, only
|
||||
run this check if the current day of the week
|
||||
matches one of these values (the name of a day of
|
||||
the week in the current locale). "weekday" and
|
||||
"weekend" are also accepted. Defaults to running
|
||||
the check on any day of the week.
|
||||
example:
|
||||
- Saturday
|
||||
- Sunday
|
||||
count_tolerance_percentage:
|
||||
type: number
|
||||
description: |
|
||||
|
@ -1766,17 +1676,25 @@ properties:
|
|||
an account at https://healthchecks.io (or self-host Healthchecks) if
|
||||
you'd like to use this service. See borgmatic monitoring
|
||||
documentation for details.
|
||||
uptime_kuma:
|
||||
uptimekuma:
|
||||
type: object
|
||||
required: ['push_url']
|
||||
required: ['server', 'push_code']
|
||||
additionalProperties: false
|
||||
properties:
|
||||
push_url:
|
||||
server:
|
||||
type: string
|
||||
description: |
|
||||
Uptime Kuma push URL without query string (do not include the
|
||||
question mark or anything after it).
|
||||
example: https://example.uptime.kuma/api/push/abcd1234
|
||||
Uptime Kuma base URL or UUID to notify when a backup
|
||||
begins, ends, or errors
|
||||
example: https://example.uptime.kuma
|
||||
push_code:
|
||||
type: string
|
||||
description: |
|
||||
Uptime Kuma "Push Code" from the push URL you have been
|
||||
given. For example, the push code for
|
||||
https://base.url/api/push/12345678?status=up&msg=OK&ping=
|
||||
would be 12345678
|
||||
example: 12345678
|
||||
states:
|
||||
type: array
|
||||
items:
|
||||
|
@ -1787,8 +1705,8 @@ properties:
|
|||
- fail
|
||||
uniqueItems: true
|
||||
description: |
|
||||
List of one or more monitoring states to push for: "start",
|
||||
"finish", and/or "fail". Defaults to pushing for all
|
||||
List of one or more monitoring states to ping for: "start",
|
||||
"finish", and/or "fail". Defaults to pinging for all
|
||||
states.
|
||||
example:
|
||||
- start
|
||||
|
|
|
@ -23,7 +23,6 @@ HOOK_NAME_TO_MODULE = {
|
|||
'cronhub': cronhub,
|
||||
'cronitor': cronitor,
|
||||
'healthchecks': healthchecks,
|
||||
'loki': loki,
|
||||
'mariadb_databases': mariadb,
|
||||
'mongodb_databases': mongodb,
|
||||
'mysql_databases': mysql,
|
||||
|
@ -31,7 +30,8 @@ HOOK_NAME_TO_MODULE = {
|
|||
'pagerduty': pagerduty,
|
||||
'postgresql_databases': postgresql,
|
||||
'sqlite_databases': sqlite,
|
||||
'uptime_kuma': uptimekuma,
|
||||
'loki': loki,
|
||||
'uptimekuma': uptimekuma,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -15,7 +15,7 @@ MONITOR_STATE_TO_HEALTHCHECKS = {
|
|||
monitor.State.LOG: 'log',
|
||||
}
|
||||
|
||||
DEFAULT_PING_BODY_LIMIT_BYTES = 100000
|
||||
DEFAULT_PING_BODY_LIMIT_BYTES = 1500
|
||||
HANDLER_IDENTIFIER = 'healthchecks'
|
||||
|
||||
|
||||
|
|
|
@ -2,13 +2,13 @@ from enum import Enum
|
|||
|
||||
MONITOR_HOOK_NAMES = (
|
||||
'apprise',
|
||||
'cronhub',
|
||||
'cronitor',
|
||||
'healthchecks',
|
||||
'loki',
|
||||
'ntfy',
|
||||
'cronitor',
|
||||
'cronhub',
|
||||
'pagerduty',
|
||||
'uptime_kuma',
|
||||
'ntfy',
|
||||
'loki',
|
||||
'uptimekuma',
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -40,7 +40,9 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev
|
|||
return
|
||||
|
||||
hostname = platform.node()
|
||||
local_timestamp = datetime.datetime.now(datetime.timezone.utc).astimezone().isoformat()
|
||||
local_timestamp = (
|
||||
datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone().isoformat()
|
||||
)
|
||||
payload = json.dumps(
|
||||
{
|
||||
'routing_key': hook_config['integration_key'],
|
||||
|
|
|
@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def initialize_monitor(
|
||||
push_url, config, config_filename, monitoring_log_level, dry_run
|
||||
ping_url, config, config_filename, monitoring_log_level, dry_run
|
||||
): # pragma: no cover
|
||||
'''
|
||||
No initialization is necessary for this monitor.
|
||||
|
@ -16,30 +16,35 @@ def initialize_monitor(
|
|||
|
||||
def ping_monitor(hook_config, config, config_filename, state, monitoring_log_level, dry_run):
|
||||
'''
|
||||
Make a get request to the configured Uptime Kuma push_url. Use the given configuration filename
|
||||
in any log entries. If this is a dry run, then don't actually push anything.
|
||||
Ping the configured Uptime Kuma push_code. Use the given configuration filename in any log entries.
|
||||
If this is a dry run, then don't actually ping anything.
|
||||
'''
|
||||
|
||||
run_states = hook_config.get('states', ['start', 'finish', 'fail'])
|
||||
|
||||
if state.name.lower() not in run_states:
|
||||
return
|
||||
if state.name.lower() in run_states:
|
||||
|
||||
dry_run_label = ' (dry run; not actually pushing)' if dry_run else ''
|
||||
status = 'down' if state.name.lower() == 'fail' else 'up'
|
||||
push_url = hook_config.get('push_url', 'https://example.uptime.kuma/api/push/abcd1234')
|
||||
query = f'status={status}&msg={state.name.lower()}'
|
||||
logger.info(
|
||||
f'{config_filename}: Pushing Uptime Kuma push_url {push_url}?{query} {dry_run_label}'
|
||||
dry_run_label = ' (dry run; not actually pinging)' if dry_run else ''
|
||||
|
||||
status = 'up'
|
||||
if state.name.lower() == 'fail':
|
||||
status = 'down'
|
||||
|
||||
base_url = hook_config.get('server', 'https://example.uptime.kuma') + '/api/push'
|
||||
push_code = hook_config.get('push_code')
|
||||
|
||||
logger.info(f'{config_filename}: Pinging Uptime Kuma push_code {push_code}{dry_run_label}')
|
||||
logger.debug(f'{config_filename}: Using Uptime Kuma ping URL {base_url}/{push_code}')
|
||||
logger.debug(
|
||||
f'{config_filename}: Full Uptime Kuma state URL {base_url}/{push_code}?status={status}&msg={state.name.lower()}&ping='
|
||||
)
|
||||
logger.debug(f'{config_filename}: Full Uptime Kuma state URL {push_url}?{query}')
|
||||
|
||||
if dry_run:
|
||||
return
|
||||
|
||||
if not dry_run:
|
||||
logging.getLogger('urllib3').setLevel(logging.ERROR)
|
||||
|
||||
try:
|
||||
response = requests.get(f'{push_url}?{query}')
|
||||
response = requests.get(
|
||||
f'{base_url}/{push_code}?status={status}&msg={state.name.lower()}&ping='
|
||||
)
|
||||
if not response.ok:
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.RequestException as error:
|
||||
|
@ -47,7 +52,7 @@ def ping_monitor(hook_config, config, config_filename, state, monitoring_log_lev
|
|||
|
||||
|
||||
def destroy_monitor(
|
||||
push_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
|
||||
ping_url_or_uuid, config, config_filename, monitoring_log_level, dry_run
|
||||
): # pragma: no cover
|
||||
'''
|
||||
No destruction is necessary for this monitor.
|
||||
|
|
|
@ -167,11 +167,12 @@ li {
|
|||
padding: .25em 0;
|
||||
}
|
||||
li ul {
|
||||
list-style-type: disc;
|
||||
padding-left: 2em;
|
||||
margin: .5em 0;
|
||||
padding-left: 1em;
|
||||
}
|
||||
li li:last-child {
|
||||
padding-bottom: 0em;
|
||||
li li {
|
||||
padding-top: .1em;
|
||||
padding-bottom: .1em;
|
||||
}
|
||||
|
||||
/* Syntax highlighting and Code blocks */
|
||||
|
|
|
@ -242,57 +242,6 @@ check --force` runs `check` even if it's specified in the `skip_actions`
|
|||
option.
|
||||
|
||||
|
||||
### Check days
|
||||
|
||||
<span class="minilink minilink-addedin">New in version 1.8.13</span> You can
|
||||
optionally configure checks to only run on particular days of the week. For
|
||||
instance:
|
||||
|
||||
```yaml
|
||||
checks:
|
||||
- name: repository
|
||||
only_run_on:
|
||||
- Saturday
|
||||
- Sunday
|
||||
- name: archives
|
||||
only_run_on:
|
||||
- weekday
|
||||
- name: spot
|
||||
only_run_on:
|
||||
- Friday
|
||||
- weekend
|
||||
```
|
||||
|
||||
Each day of the week is specified in the current locale (system
|
||||
language/country settings). `weekend` and `weekday` are also accepted.
|
||||
|
||||
Just like with `frequency`, borgmatic only makes a best effort to run checks
|
||||
on the given day of the week. For instance, if you run `borgmatic check`
|
||||
daily, then every day borgmatic will have an opportunity to determine whether
|
||||
your checks are configured to run on that day. If they are, then the checks
|
||||
run. If not, they are skipped.
|
||||
|
||||
For instance, with the above configuration, if borgmatic is run on a Saturday,
|
||||
the `repository` check will run. But on a Monday? The repository check will
|
||||
get skipped. And if borgmatic is never run on a Saturday or a Sunday, that
|
||||
check will never get a chance to run.
|
||||
|
||||
Also, the day of the week configuration applies *after* any configured
|
||||
`frequency` for a check. So for instance, imagine the following configuration:
|
||||
|
||||
```yaml
|
||||
checks:
|
||||
- name: repository
|
||||
frequency: 2 weeks
|
||||
only_run_on:
|
||||
- Monday
|
||||
```
|
||||
|
||||
If you run borgmatic daily with that configuration, then borgmatic will first
|
||||
wait two weeks after the previous check before running the check again—on the
|
||||
first Monday after the `frequency` duration elapses.
|
||||
|
||||
|
||||
### Running only checks
|
||||
|
||||
<span class="minilink minilink-addedin">New in version 1.7.1</span> If you
|
||||
|
|
|
@ -102,9 +102,9 @@ and depend on containers for runtime dependencies. These tests do run on the
|
|||
continuous integration (CI) server, and running them on your developer machine
|
||||
is the closest thing to dev-CI parity.
|
||||
|
||||
If you would like to run the end-to-end tests, first install Docker (or
|
||||
Podman; see below) and [Docker
|
||||
Compose](https://docs.docker.com/compose/install/). Then run:
|
||||
If you would like to run the full test suite, first install Docker (or Podman;
|
||||
see below) and [Docker Compose](https://docs.docker.com/compose/install/).
|
||||
Then run:
|
||||
|
||||
```bash
|
||||
scripts/run-end-to-end-tests
|
||||
|
@ -152,14 +152,12 @@ the following deviations from it:
|
|||
* In general, spell out words in variable names instead of shortening them.
|
||||
So, think `index` instead of `idx`. There are some notable exceptions to
|
||||
this though (like `config`).
|
||||
* Favor blank lines around `if` statements, `return`s, logical code groupings,
|
||||
etc. Readability is more important than packing the code tightly.
|
||||
|
||||
borgmatic code uses the [Black](https://black.readthedocs.io/en/stable/) code
|
||||
formatter, the [Flake8](http://flake8.pycqa.org/en/latest/) code checker, and
|
||||
the [isort](https://github.com/timothycrosley/isort) import orderer, so
|
||||
certain code style requirements are enforced when running automated tests. See
|
||||
the Black, Flake8, and isort documentation for more information.
|
||||
certain code style requirements will be enforced when running automated tests.
|
||||
See the Black, Flake8, and isort documentation for more information.
|
||||
|
||||
|
||||
## Continuous integration
|
||||
|
|
|
@ -39,14 +39,13 @@ below for how to configure this.
|
|||
borgmatic integrates with these monitoring services and libraries, pinging
|
||||
them as backups happen:
|
||||
|
||||
* [Apprise](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook)
|
||||
* [Cronhub](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook)
|
||||
* [Cronitor](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook)
|
||||
* [Grafana Loki](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook)
|
||||
* [Healthchecks](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook)
|
||||
* [ntfy](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
|
||||
* [Cronitor](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook)
|
||||
* [Cronhub](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook)
|
||||
* [PagerDuty](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook)
|
||||
* [Uptime Kuma](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#uptime-kuma-hook)
|
||||
* [ntfy](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook)
|
||||
* [Grafana Loki](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook)
|
||||
* [Apprise](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook)
|
||||
|
||||
The idea is that you'll receive an alert when something goes wrong or when the
|
||||
service doesn't hear from borgmatic for a configured interval (if supported).
|
||||
|
@ -506,61 +505,6 @@ See the [configuration
|
|||
reference](https://torsion.org/borgmatic/docs/reference/configuration/) for
|
||||
details.
|
||||
|
||||
## Uptime Kuma hook
|
||||
|
||||
[Uptime Kuma](https://uptime.kuma.pet) is an easy-to-use, self-hosted
|
||||
monitoring tool and can provide a Push monitor type to accept HTTP `GET`
|
||||
requests from a service instead of contacting it directly.
|
||||
|
||||
Uptime Kuma allows you to see a history of monitor states and can in turn
|
||||
alert via ntfy, Gotify, Matrix, Apprise, Email, and many more.
|
||||
|
||||
An example configuration is shown here with all the available options:
|
||||
|
||||
```yaml
|
||||
uptime_kuma:
|
||||
push_url: https://kuma.my-domain.com/api/push/abcd1234
|
||||
states:
|
||||
- start
|
||||
- finish
|
||||
- fail
|
||||
```
|
||||
|
||||
The `push_url` is provided to you from your Uptime Kuma service and
|
||||
originally includes a query string—the text including and after the question
|
||||
mark (`?`). But please do not include the query string in the `push_url`
|
||||
configuration; borgmatic will add this automatically depending on the state of
|
||||
your backup.
|
||||
|
||||
Using `start`, `finish` and `fail` states means you will get two "up beats" in
|
||||
Uptime Kuma for successful backups and the ability to see failures if and when
|
||||
the backup started (was there a `start` beat?).
|
||||
|
||||
A reasonable base-level configuration for an Uptime Kuma Monitor for a backup
|
||||
is below:
|
||||
|
||||
```ini
|
||||
# These are to be entered into Uptime Kuma and not into your borgmatic
|
||||
# configuration.
|
||||
|
||||
# Push monitors wait for the client to contact Uptime Kuma instead of Uptime
|
||||
# Kuma contacting the client. This is perfect for backup monitoring.
|
||||
Monitor Type = Push
|
||||
|
||||
Heartbeat Interval = 90000 # = 25 hours = 1 day + 1 hour
|
||||
|
||||
# Wait 6 times the Heartbeat Retry (below) before logging a heartbeat missed.
|
||||
Retries = 6
|
||||
|
||||
# Multiplied by Retries this gives a grace period within which the monitor
|
||||
# goes into the "Pending" state.
|
||||
Heartbeat Retry = 360 # = 10 minutes
|
||||
|
||||
# For each Heartbeat Interval if the backup fails repeatedly, a notification
|
||||
# is sent each time.
|
||||
Resend Notification every X times = 1
|
||||
```
|
||||
|
||||
|
||||
## Scripting borgmatic
|
||||
|
||||
|
|
|
@ -10,17 +10,20 @@ eleventyNavigation:
|
|||
In case you're interested in [developing on
|
||||
borgmatic](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/),
|
||||
here's an abridged primer on how its Python source code is organized to help
|
||||
you get started. Starting at the top level, we have:
|
||||
you get started. At the top level we have:
|
||||
|
||||
* [borgmatic](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic): The main borgmatic source module. Most of the code is here. Within that:
|
||||
* [actions](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/actions): borgmatic-specific logic for running each action (create, list, check, etc.).
|
||||
* [borg](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/borg): Lower-level code that's responsible for interacting with Borg to run each action.
|
||||
* [commands](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/commands): Looking to add a new flag or action? Start here. This contains borgmatic's entry point, argument parsing, and shell completion.
|
||||
* [config](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/config): Code responsible for loading, normalizing, and validating borgmatic's configuration.
|
||||
* [hooks](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/hooks): Looking to add a new database or monitoring integration? Start here.
|
||||
* [borgmatic](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic): The main borgmatic source module. Most of the code is here.
|
||||
* [docs](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/docs): How-to and reference documentation, including the document you're reading now.
|
||||
* [sample](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/sample): Example configurations for cron and systemd.
|
||||
* [scripts](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/scripts): Dev-facing scripts for things like building documentation and running end-to-end tests.
|
||||
* [tests](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/tests): Automated tests organized by: end-to-end, integration, and unit.
|
||||
|
||||
Within the `borgmatic` directory you'll find:
|
||||
|
||||
* [actions](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/actions): Mid-level code for running each borgmatic action (create, list, check, etc.).
|
||||
* [borg](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/borg): Lower-level code that actually shells out to Borg for each action.
|
||||
* [commands](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/commands): Looking to add a new flag or action? Start here. This contains borgmatic's entry point, argument parsing, and shell completion.
|
||||
* [config](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/config): Code responsible for loading, normalizing, and validating borgmatic's configuration.
|
||||
* [hooks](https://projects.torsion.org/borgmatic-collective/borgmatic/src/branch/main/borgmatic/hooks): Looking to add a new database or monitoring integration? Start here.
|
||||
|
||||
So, broadly speaking, the control flow goes: `commands` → `config` followed by `commands` → `actions` → `borg` and `hooks`.
|
||||
|
|
2
setup.py
2
setup.py
|
@ -1,6 +1,6 @@
|
|||
from setuptools import find_packages, setup
|
||||
|
||||
VERSION = '1.8.13.dev0'
|
||||
VERSION = '1.8.12.dev0'
|
||||
|
||||
|
||||
setup(
|
||||
|
|
|
@ -113,74 +113,6 @@ def test_filter_checks_on_frequency_retains_check_without_frequency():
|
|||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_empty_only_run_on():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'only_run_on': []}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
force=False,
|
||||
archives_check_id='1234',
|
||||
datetime_now=flexmock(weekday=lambda: 0),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'only_run_on': [module.calendar.day_name[0]]}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
force=False,
|
||||
archives_check_id='1234',
|
||||
datetime_now=flexmock(weekday=lambda: 0),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today_via_weekday_value():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'only_run_on': ['weekday']}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
force=False,
|
||||
archives_check_id='1234',
|
||||
datetime_now=flexmock(weekday=lambda: 0),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_only_run_on_matching_today_via_weekend_value():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'only_run_on': ['weekend']}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
force=False,
|
||||
archives_check_id='1234',
|
||||
datetime_now=flexmock(weekday=lambda: 6),
|
||||
) == ('archives',)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_skips_check_with_only_run_on_not_matching_today():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(None)
|
||||
|
||||
assert (
|
||||
module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'only_run_on': [module.calendar.day_name[5]]}]},
|
||||
borg_repository_id='repo',
|
||||
checks=('archives',),
|
||||
force=False,
|
||||
archives_check_id='1234',
|
||||
datetime_now=flexmock(weekday=lambda: 0),
|
||||
)
|
||||
== ()
|
||||
)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency():
|
||||
flexmock(module).should_receive('parse_frequency').and_return(
|
||||
module.datetime.timedelta(hours=1)
|
||||
|
@ -236,7 +168,7 @@ def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency():
|
|||
)
|
||||
|
||||
|
||||
def test_filter_checks_on_frequency_retains_check_with_unelapsed_frequency_and_force():
|
||||
def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force():
|
||||
assert module.filter_checks_on_frequency(
|
||||
config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]},
|
||||
borg_repository_id='repo',
|
||||
|
|
|
@ -222,35 +222,35 @@ def test_make_archive_filter_flags_with_default_checks_and_prefix_includes_match
|
|||
assert flags == ('--match-archives', 'sh:foo-*')
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_repository_check_returns_flag():
|
||||
flags = module.make_check_name_flags({'repository'}, ())
|
||||
def test_make_check_flags_with_repository_check_returns_flag():
|
||||
flags = module.make_check_flags({'repository'}, ())
|
||||
|
||||
assert flags == ('--repository-only',)
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_archives_check_returns_flag():
|
||||
flags = module.make_check_name_flags({'archives'}, ())
|
||||
def test_make_check_flags_with_archives_check_returns_flag():
|
||||
flags = module.make_check_flags({'archives'}, ())
|
||||
|
||||
assert flags == ('--archives-only',)
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_archives_check_and_archive_filter_flags_includes_those_flags():
|
||||
flags = module.make_check_name_flags({'archives'}, ('--match-archives', 'sh:foo-*'))
|
||||
def test_make_check_flags_with_archives_check_and_archive_filter_flags_includes_those_flags():
|
||||
flags = module.make_check_flags({'archives'}, ('--match-archives', 'sh:foo-*'))
|
||||
|
||||
assert flags == ('--archives-only', '--match-archives', 'sh:foo-*')
|
||||
|
||||
|
||||
def test_make_check_name_flags_without_archives_check_and_with_archive_filter_flags_includes_those_flags():
|
||||
flags = module.make_check_name_flags({'repository'}, ('--match-archives', 'sh:foo-*'))
|
||||
def test_make_check_flags_without_archives_check_and_with_archive_filter_flags_includes_those_flags():
|
||||
flags = module.make_check_flags({'repository'}, ('--match-archives', 'sh:foo-*'))
|
||||
|
||||
assert flags == ('--repository-only',)
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_data_check_returns_flag_and_implies_archives():
|
||||
def test_make_check_flags_with_data_check_returns_flag_and_implies_archives():
|
||||
flexmock(module.feature).should_receive('available').and_return(True)
|
||||
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
|
||||
|
||||
flags = module.make_check_name_flags({'data'}, ())
|
||||
flags = module.make_check_flags({'data'}, ())
|
||||
|
||||
assert flags == (
|
||||
'--archives-only',
|
||||
|
@ -258,20 +258,20 @@ def test_make_check_name_flags_with_data_check_returns_flag_and_implies_archives
|
|||
)
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_extract_omits_extract_flag():
|
||||
def test_make_check_flags_with_extract_omits_extract_flag():
|
||||
flexmock(module.feature).should_receive('available').and_return(True)
|
||||
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
|
||||
|
||||
flags = module.make_check_name_flags({'extract'}, ())
|
||||
flags = module.make_check_flags({'extract'}, ())
|
||||
|
||||
assert flags == ()
|
||||
|
||||
|
||||
def test_make_check_name_flags_with_repository_and_data_checks_does_not_return_repository_only():
|
||||
def test_make_check_flags_with_repository_and_data_checks_does_not_return_repository_only():
|
||||
flexmock(module.feature).should_receive('available').and_return(True)
|
||||
flexmock(module.flags).should_receive('make_match_archives_flags').and_return(())
|
||||
|
||||
flags = module.make_check_name_flags(
|
||||
flags = module.make_check_flags(
|
||||
{
|
||||
'repository',
|
||||
'data',
|
||||
|
@ -332,7 +332,8 @@ def test_get_repository_id_with_missing_json_keys_raises():
|
|||
|
||||
def test_check_archives_with_progress_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
|
@ -348,12 +349,7 @@ def test_check_archives_with_progress_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=True,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=True, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
|
@ -363,7 +359,8 @@ def test_check_archives_with_progress_passes_through_to_borg():
|
|||
|
||||
def test_check_archives_with_repair_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
|
@ -379,148 +376,7 @@ def test_check_archives_with_repair_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=True,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
archive_filter_flags=(),
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_max_duration_flag_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'check', '--max-duration', '33', 'repo'),
|
||||
extra_environment=None,
|
||||
borg_local_path='borg',
|
||||
borg_exit_codes=None,
|
||||
).once()
|
||||
|
||||
module.check_archives(
|
||||
repository_path='repo',
|
||||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=33,
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
archive_filter_flags=(),
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_max_duration_flag_and_archives_check_errors():
|
||||
config = {}
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
module.check_archives(
|
||||
repository_path='repo',
|
||||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=33,
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository', 'archives'},
|
||||
archive_filter_flags=(),
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_max_duration_option_passes_through_to_borg():
|
||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'check', '--max-duration', '33', 'repo'),
|
||||
extra_environment=None,
|
||||
borg_local_path='borg',
|
||||
borg_exit_codes=None,
|
||||
).once()
|
||||
|
||||
module.check_archives(
|
||||
repository_path='repo',
|
||||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
archive_filter_flags=(),
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_max_duration_option_and_archives_check_errors():
|
||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
||||
flexmock(module).should_receive('execute_command').never()
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
module.check_archives(
|
||||
repository_path='repo',
|
||||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository', 'archives'},
|
||||
archive_filter_flags=(),
|
||||
)
|
||||
|
||||
|
||||
def test_check_archives_with_max_duration_flag_overrides_max_duration_option():
|
||||
config = {'checks': [{'name': 'repository', 'max_duration': 33}]}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
('borg', 'check', '--max-duration', '44', 'repo'),
|
||||
extra_environment=None,
|
||||
borg_local_path='borg',
|
||||
borg_exit_codes=None,
|
||||
).once()
|
||||
|
||||
module.check_archives(
|
||||
repository_path='repo',
|
||||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=44,
|
||||
progress=None, repair=True, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
|
@ -539,7 +395,7 @@ def test_check_archives_with_max_duration_flag_overrides_max_duration_option():
|
|||
)
|
||||
def test_check_archives_calls_borg_with_parameters(checks):
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', 'repo'))
|
||||
|
||||
|
@ -548,12 +404,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -563,7 +414,7 @@ def test_check_archives_calls_borg_with_parameters(checks):
|
|||
|
||||
def test_check_archives_with_log_info_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_logging_mock(logging.INFO)
|
||||
insert_execute_command_mock(('borg', 'check', '--info', 'repo'))
|
||||
|
@ -573,12 +424,7 @@ def test_check_archives_with_log_info_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
|
@ -588,7 +434,7 @@ def test_check_archives_with_log_info_passes_through_to_borg():
|
|||
|
||||
def test_check_archives_with_log_debug_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_logging_mock(logging.DEBUG)
|
||||
insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo'))
|
||||
|
@ -598,12 +444,7 @@ def test_check_archives_with_log_debug_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
|
@ -614,7 +455,7 @@ def test_check_archives_with_log_debug_passes_through_to_borg():
|
|||
def test_check_archives_with_local_path_calls_borg_via_local_path():
|
||||
checks = {'repository'}
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg1', 'check', 'repo'))
|
||||
|
||||
|
@ -623,12 +464,7 @@ def test_check_archives_with_local_path_calls_borg_via_local_path():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -641,7 +477,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
|
|||
checks = {'repository'}
|
||||
borg_exit_codes = flexmock()
|
||||
config = {'borg_exit_codes': borg_exit_codes}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', 'repo'), borg_exit_codes=borg_exit_codes)
|
||||
|
||||
|
@ -650,12 +486,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -666,7 +497,7 @@ def test_check_archives_with_exit_codes_calls_borg_using_them():
|
|||
def test_check_archives_with_remote_path_passes_through_to_borg():
|
||||
checks = {'repository'}
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo'))
|
||||
|
||||
|
@ -675,12 +506,7 @@ def test_check_archives_with_remote_path_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -692,7 +518,7 @@ def test_check_archives_with_remote_path_passes_through_to_borg():
|
|||
def test_check_archives_with_log_json_passes_through_to_borg():
|
||||
checks = {'repository'}
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', '--log-json', 'repo'))
|
||||
|
||||
|
@ -701,12 +527,7 @@ def test_check_archives_with_log_json_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=True),
|
||||
checks=checks,
|
||||
|
@ -717,7 +538,7 @@ def test_check_archives_with_log_json_passes_through_to_borg():
|
|||
def test_check_archives_with_lock_wait_passes_through_to_borg():
|
||||
checks = {'repository'}
|
||||
config = {'lock_wait': 5}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo'))
|
||||
|
||||
|
@ -726,12 +547,7 @@ def test_check_archives_with_lock_wait_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -743,7 +559,7 @@ def test_check_archives_with_retention_prefix():
|
|||
checks = {'repository'}
|
||||
prefix = 'foo-'
|
||||
config = {'prefix': prefix}
|
||||
flexmock(module).should_receive('make_check_name_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').with_args(checks, ()).and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', 'repo'))
|
||||
|
||||
|
@ -752,12 +568,7 @@ def test_check_archives_with_retention_prefix():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks=checks,
|
||||
|
@ -767,7 +578,7 @@ def test_check_archives_with_retention_prefix():
|
|||
|
||||
def test_check_archives_with_extra_borg_options_passes_through_to_borg():
|
||||
config = {'extra_borg_options': {'check': '--extra --options'}}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(())
|
||||
flexmock(module).should_receive('make_check_flags').and_return(())
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo'))
|
||||
|
||||
|
@ -776,12 +587,7 @@ def test_check_archives_with_extra_borg_options_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives=None,
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives=None
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'repository'},
|
||||
|
@ -791,9 +597,7 @@ def test_check_archives_with_extra_borg_options_passes_through_to_borg():
|
|||
|
||||
def test_check_archives_with_match_archives_passes_through_to_borg():
|
||||
config = {}
|
||||
flexmock(module).should_receive('make_check_name_flags').and_return(
|
||||
('--match-archives', 'foo-*')
|
||||
)
|
||||
flexmock(module).should_receive('make_check_flags').and_return(('--match-archives', 'foo-*'))
|
||||
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
|
||||
flexmock(module.environment).should_receive('make_environment')
|
||||
flexmock(module).should_receive('execute_command').with_args(
|
||||
|
@ -808,12 +612,7 @@ def test_check_archives_with_match_archives_passes_through_to_borg():
|
|||
config=config,
|
||||
local_borg_version='1.2.3',
|
||||
check_arguments=flexmock(
|
||||
progress=None,
|
||||
repair=None,
|
||||
only_checks=None,
|
||||
force=None,
|
||||
match_archives='foo-*',
|
||||
max_duration=None,
|
||||
progress=None, repair=None, only_checks=None, force=None, match_archives='foo-*'
|
||||
),
|
||||
global_arguments=flexmock(log_json=False),
|
||||
checks={'archives'},
|
||||
|
|
|
@ -693,7 +693,6 @@ def test_make_base_create_command_includes_exclude_patterns_in_borg_command():
|
|||
('one_file_system', True, True, ('--one-file-system',)),
|
||||
('upload_rate_limit', 100, True, ('--upload-ratelimit', '100')),
|
||||
('upload_rate_limit', 100, False, ('--remote-ratelimit', '100')),
|
||||
('upload_buffer_size', 160, True, ('--upload-buffer', '160')),
|
||||
('numeric_ids', True, True, ('--numeric-ids',)),
|
||||
('numeric_ids', True, False, ('--numeric-owner',)),
|
||||
('read_special', True, True, ('--read-special',)),
|
||||
|
|
|
@ -3,14 +3,15 @@ from flexmock import flexmock
|
|||
import borgmatic.hooks.monitor
|
||||
from borgmatic.hooks import uptimekuma as module
|
||||
|
||||
DEFAULT_PUSH_URL = 'https://example.uptime.kuma/api/push/abcd1234'
|
||||
CUSTOM_PUSH_URL = 'https://uptime.example.com/api/push/efgh5678'
|
||||
default_base_url = 'https://example.uptime.kuma'
|
||||
custom_base_url = 'https://uptime.example.com'
|
||||
push_code = 'abcd1234'
|
||||
|
||||
|
||||
def test_ping_monitor_hits_default_uptimekuma_on_fail():
|
||||
hook_config = {}
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{DEFAULT_PUSH_URL}?status=down&msg=fail'
|
||||
f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -24,9 +25,9 @@ def test_ping_monitor_hits_default_uptimekuma_on_fail():
|
|||
|
||||
|
||||
def test_ping_monitor_hits_custom_uptimekuma_on_fail():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
|
||||
f'{custom_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -39,10 +40,10 @@ def test_ping_monitor_hits_custom_uptimekuma_on_fail():
|
|||
)
|
||||
|
||||
|
||||
def test_ping_monitor_custom_uptimekuma_on_start():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
def test_ping_monitor_hits_default_uptimekuma_on_start():
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{CUSTOM_PUSH_URL}?status=up&msg=start'
|
||||
f'{default_base_url}/api/push/{push_code}?status=up&msg=start&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -55,10 +56,26 @@ def test_ping_monitor_custom_uptimekuma_on_start():
|
|||
)
|
||||
|
||||
|
||||
def test_ping_monitor_custom_uptimekuma_on_finish():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
def test_ping_monitor_custom_uptimekuma_on_start():
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{CUSTOM_PUSH_URL}?status=up&msg=finish'
|
||||
f'{custom_base_url}/api/push/{push_code}?status=up&msg=start&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.START,
|
||||
monitoring_log_level=1,
|
||||
dry_run=False,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_hits_default_uptimekuma_on_finish():
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{default_base_url}/api/push/{push_code}?status=up&msg=finish&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -71,8 +88,24 @@ def test_ping_monitor_custom_uptimekuma_on_finish():
|
|||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
def test_ping_monitor_custom_uptimekuma_on_finish():
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{custom_base_url}/api/push/{push_code}?status=up&msg=finish&ping='
|
||||
).and_return(flexmock(ok=True)).once()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.FINISH,
|
||||
monitoring_log_level=1,
|
||||
dry_run=False,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_default_uptimekuma_on_fail_dry_run():
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -85,8 +118,22 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
|
|||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
def test_ping_monitor_does_not_hit_custom_uptimekuma_on_fail_dry_run():
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.FAIL,
|
||||
monitoring_log_level=1,
|
||||
dry_run=True,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_default_uptimekuma_on_start_dry_run():
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -99,8 +146,36 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
|
|||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_custom_uptimekuma_on_start_dry_run():
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.START,
|
||||
monitoring_log_level=1,
|
||||
dry_run=True,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_default_uptimekuma_on_finish_dry_run():
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.FINISH,
|
||||
monitoring_log_level=1,
|
||||
dry_run=True,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_does_not_hit_custom_uptimekuma_on_finish_dry_run():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
hook_config = {'server': custom_base_url, 'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
|
@ -114,9 +189,9 @@ def test_ping_monitor_does_not_hit_custom_uptimekuma_on_finish_dry_run():
|
|||
|
||||
|
||||
def test_ping_monitor_with_connection_error_logs_warning():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
hook_config = {'push_code': push_code}
|
||||
flexmock(module.requests).should_receive('get').with_args(
|
||||
f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
|
||||
f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
|
||||
).and_raise(module.requests.exceptions.ConnectionError)
|
||||
flexmock(module.logger).should_receive('warning').once()
|
||||
|
||||
|
@ -131,13 +206,13 @@ def test_ping_monitor_with_connection_error_logs_warning():
|
|||
|
||||
|
||||
def test_ping_monitor_with_other_error_logs_warning():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
hook_config = {'push_code': push_code}
|
||||
response = flexmock(ok=False)
|
||||
response.should_receive('raise_for_status').and_raise(
|
||||
module.requests.exceptions.RequestException
|
||||
)
|
||||
flexmock(module.requests).should_receive('post').with_args(
|
||||
f'{CUSTOM_PUSH_URL}?status=down&msg=fail'
|
||||
f'{default_base_url}/api/push/{push_code}?status=down&msg=fail&ping='
|
||||
).and_return(response)
|
||||
flexmock(module.logger).should_receive('warning').once()
|
||||
|
||||
|
@ -149,17 +224,3 @@ def test_ping_monitor_with_other_error_logs_warning():
|
|||
monitoring_log_level=1,
|
||||
dry_run=False,
|
||||
)
|
||||
|
||||
|
||||
def test_ping_monitor_with_invalid_run_state():
|
||||
hook_config = {'push_url': CUSTOM_PUSH_URL}
|
||||
flexmock(module.requests).should_receive('get').never()
|
||||
|
||||
module.ping_monitor(
|
||||
hook_config,
|
||||
{},
|
||||
'config.yaml',
|
||||
borgmatic.hooks.monitor.State.LOG,
|
||||
monitoring_log_level=1,
|
||||
dry_run=True,
|
||||
)
|
||||
|
|
Loading…
Reference in a new issue